[ 451.433995] env[61898]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61898) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.433995] env[61898]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61898) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.433995] env[61898]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61898) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 451.434930] env[61898]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 451.536266] env[61898]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61898) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 451.549713] env[61898]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61898) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 452.154534] env[61898]: INFO nova.virt.driver [None req-dd98421c-fc12-4b1f-8878-6df539f71490 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 452.226802] env[61898]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 452.227016] env[61898]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 452.227092] env[61898]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61898) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 455.366028] env[61898]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-0ca51984-a8c9-4c8b-bdf3-65be2c04bf90 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.382344] env[61898]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61898) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 455.382528] env[61898]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-30207061-1981-4f3c-a418-f11f4f526583 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.414660] env[61898]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 51780.
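The session setup above is driven by oslo.vmware's VMwareAPISession, whose construction performs the SessionManager.Login call seen in the log. A minimal illustrative sketch of the equivalent Python call follows; the credentials are placeholders and the keyword names reflect the oslo.vmware constructor as commonly documented, so treat the exact signature as an assumption to verify against the installed release:

    from oslo_vmware import api

    # Placeholder credentials; the host matches the vCenter named in the log above.
    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',
        'placeholder-user@vsphere.local',   # assumed/placeholder username
        'placeholder-password',             # assumed/placeholder password
        api_retry_count=10,                 # assumed keyword name
        task_poll_interval=0.5,             # assumed keyword name
        port=443)
    # Constructing the session logs into the host, matching the
    # "Logging into host" and "Successfully established new session" lines.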
[ 455.414812] env[61898]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.188s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 455.415356] env[61898]: INFO nova.virt.vmwareapi.driver [None req-dd98421c-fc12-4b1f-8878-6df539f71490 None None] VMware vCenter version: 7.0.3
[ 455.418718] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd0569b-baee-468e-ba9d-7929d45ad19e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.435567] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315b312c-cded-4ef0-a246-2dda4b0026b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.441399] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cb6325-2350-4ae6-b42f-e678bd75fe27 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.447892] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b937411-d934-4aeb-9cc2-bc7688c25075 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.460648] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdefe6d-7ffa-48b5-b829-4db077e2baeb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.466414] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531da9eb-2bc5-4401-94ec-cb773b6b4607 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.496073] env[61898]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-378f9189-5490-4fd6-9225-fb2d712170c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 455.500896] env[61898]: DEBUG nova.virt.vmwareapi.driver [None req-dd98421c-fc12-4b1f-8878-6df539f71490 None None] Extension org.openstack.compute already exists. {{(pid=61898) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:227}}
[ 455.503507] env[61898]: INFO nova.compute.provider_config [None req-dd98421c-fc12-4b1f-8878-6df539f71490 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
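The Acquiring/acquired/"released" lines around "oslo_vmware_api_lock" (and the "singleton_lock" entries further down) are emitted by oslo.concurrency's lockutils, which also logs the waited/held durations. A short sketch of both usage patterns, reusing the lock names from this log purely for illustration:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        # Runs with the named lock held; the decorator's inner wrapper logs the
        # Acquiring/acquired/"released" lines and the waited/held timings above.
        pass

    # The plain context-manager form logs via lockutils.lock(), as in the
    # "singleton_lock" Acquiring/Acquired/Releasing entries below.
    with lockutils.lock('singleton_lock'):
        pass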
[ 456.007240] env[61898]: DEBUG nova.context [None req-dd98421c-fc12-4b1f-8878-6df539f71490 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),cecb9197-778f-409d-a51d-f8780f37088b(cell1) {{(pid=61898) load_cells /opt/stack/nova/nova/context.py:464}}
[ 456.009480] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 456.009719] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 456.010418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.010861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Acquiring lock "cecb9197-778f-409d-a51d-f8780f37088b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 456.011061] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Lock "cecb9197-778f-409d-a51d-f8780f37088b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 456.012182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Lock "cecb9197-778f-409d-a51d-f8780f37088b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.032623] env[61898]: INFO dbcounter [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Registered counter for database nova_cell0
[ 456.041237] env[61898]: INFO dbcounter [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Registered counter for database nova_cell1
[ 456.044496] env[61898]: DEBUG oslo_db.sqlalchemy.engines [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61898) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.044842] env[61898]: DEBUG oslo_db.sqlalchemy.engines [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61898) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.050047] env[61898]: ERROR nova.db.main.api [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.050047] env[61898]: result = function(*args, **kwargs)
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 456.050047] env[61898]: return func(*args, **kwargs)
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.050047] env[61898]: result = fn(*args, **kwargs)
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.050047] env[61898]: return f(*args, **kwargs)
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 456.050047] env[61898]: return db.service_get_minimum_version(context, binaries)
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.050047] env[61898]: _check_db_access()
[ 456.050047] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.050047] env[61898]: stacktrace = ''.join(traceback.format_stack())
[ 456.050047] env[61898]:
[ 456.050896] env[61898]: ERROR nova.db.main.api [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.050896] env[61898]: result = function(*args, **kwargs)
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 456.050896] env[61898]: return func(*args, **kwargs)
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.050896] env[61898]: result = fn(*args, **kwargs)
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.050896] env[61898]: return f(*args, **kwargs)
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 456.050896] env[61898]: return db.service_get_minimum_version(context, binaries)
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.050896] env[61898]: _check_db_access()
[ 456.050896] env[61898]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.050896] env[61898]: stacktrace = ''.join(traceback.format_stack())
[ 456.050896] env[61898]:
[ 456.051497] env[61898]: WARNING nova.objects.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Failed to get minimum service version for cell cecb9197-778f-409d-a51d-f8780f37088b
[ 456.051497] env[61898]: WARNING nova.objects.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 456.051852] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Acquiring lock "singleton_lock" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 456.052017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Acquired lock "singleton_lock" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
456.052271] env[61898]: DEBUG oslo_concurrency.lockutils [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Releasing lock "singleton_lock" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 456.052594] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Full set of CONF: {{(pid=61898) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 456.052735] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ******************************************************************************** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 456.052859] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Configuration options gathered from: {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 456.052993] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 456.053202] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 456.053327] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ================================================================================ {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 456.053533] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] allow_resize_to_same_host = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.053701] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] arq_binding_timeout = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.053830] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] backdoor_port = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.053952] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] backdoor_socket = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054127] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] block_device_allocate_retries = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054285] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] block_device_allocate_retries_interval = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054448] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cert = self.pem {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054606] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054770] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute_monitors = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.054936] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] config_dir = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055117] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] config_drive_format = iso9660 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055250] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055412] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] config_source = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055576] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] console_host = devstack {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055766] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] control_exchange = nova {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.055925] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cpu_allocation_ratio = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.056104] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] daemon = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.056276] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] debug = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.056432] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_access_ip_network_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.056704] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_availability_zone = nova {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.056874] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_ephemeral_format = 
None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.057074] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_green_pool_size = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.057330] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.057496] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] default_schedule_zone = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.057651] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] disk_allocation_ratio = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.057836] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] enable_new_services = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058031] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] enabled_apis = ['osapi_compute'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058201] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] enabled_ssl_apis = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058361] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] flat_injected = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058517] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] force_config_drive = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058677] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] force_raw_images = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.058840] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] graceful_shutdown_timeout = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059062] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] heal_instance_info_cache_interval = 60 {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059223] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] host = cpu-1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059401] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059563] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059722] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.059931] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060110] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_build_timeout = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060270] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_delete_interval = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060432] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_format = [instance: %(uuid)s] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060594] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_name_template = instance-%08x {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060757] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_usage_audit = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.060924] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_usage_audit_period = month {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061099] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061263] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061422] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] internal_service_availability_zone = internal {{(pid=61898) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061573] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] key = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061729] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] live_migration_retry_count = 30 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.061893] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_color = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062065] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_config_append = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062231] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062386] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_dir = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062541] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062664] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_options = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062821] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_rotate_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.062984] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_rotate_interval_type = days {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063157] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] log_rotation_type = none {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063284] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063407] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063567] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063725] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.063855] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064025] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] long_rpc_timeout = 1800 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064176] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_concurrent_builds = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064331] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_concurrent_live_migrations = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064488] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_concurrent_snapshots = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064642] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_local_block_devices = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064796] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_logfile_count = 30 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.064950] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] max_logfile_size_mb = 200 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065119] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] maximum_instance_delete_attempts = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065284] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metadata_listen = 0.0.0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065451] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metadata_listen_port = 8775 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065637] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metadata_workers = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065817] env[61898]: DEBUG oslo_service.service 
[None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] migrate_max_retries = -1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.065984] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] mkisofs_cmd = genisoimage {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.066202] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.066334] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] my_ip = 10.180.1.21 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.066684] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.066785] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] network_allocate_retries = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.066985] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.067166] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.067328] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] osapi_compute_listen_port = 8774 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.067491] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] osapi_compute_unique_server_name_scope = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.067657] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] osapi_compute_workers = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.067853] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] password_length = 12 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068031] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] periodic_enable = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068194] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] periodic_fuzzy_delay = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068357] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] pointer_model = usbtablet 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068518] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] preallocate_images = none {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068675] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] publish_errors = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068813] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] pybasedir = /opt/stack/nova {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.068981] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ram_allocation_ratio = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069164] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rate_limit_burst = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069348] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rate_limit_except_level = CRITICAL {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069469] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rate_limit_interval = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069633] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reboot_timeout = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069776] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reclaim_instance_interval = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.069928] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] record = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070102] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reimage_timeout_per_gb = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070420] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] report_interval = 120 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070420] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rescue_timeout = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070565] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reserved_host_cpus = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070716] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reserved_host_disk_mb = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.070897] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reserved_host_memory_mb = 512 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071087] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] reserved_huge_pages = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071252] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] resize_confirm_window = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071407] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] resize_fs_using_block_device = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071563] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] resume_guests_state_on_host_boot = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071725] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.071883] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] rpc_response_timeout = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072049] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] run_external_periodic_tasks = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072216] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] running_deleted_instance_action = reap {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072372] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072527] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] running_deleted_instance_timeout = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072683] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler_instance_sync_interval = 120 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.072844] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_down_time = 720 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073009] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] 
servicegroup_driver = db {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073166] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] shell_completion = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073321] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] shelved_offload_time = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073475] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] shelved_poll_interval = 3600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073637] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] shutdown_timeout = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073794] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] source_is_ipv6 = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.073948] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ssl_only = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074207] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074369] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] sync_power_state_interval = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074526] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] sync_power_state_pool_size = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074687] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] syslog_log_facility = LOG_USER {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074840] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] tempdir = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.074995] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] timeout_nbd = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075171] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] transport_url = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075327] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] update_resources_interval = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075479] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_cow_images = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075657] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_eventlog = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075833] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_journal = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.075984] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_json = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.076153] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_rootwrap_daemon = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.076308] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_stderr = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.076459] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] use_syslog = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.076607] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vcpu_pin_set = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.076859] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plugging_is_fatal = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077063] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plugging_timeout = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077236] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] virt_mkfs = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077396] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] volume_usage_poll_interval = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077554] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] watch_log_file = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077756] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] web = /usr/share/spice-html5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.077931] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078112] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078279] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078448] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_concurrency.disable_process_locking = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078731] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.078910] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079084] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079262] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079422] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079588] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079766] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.auth_strategy = keystone {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.079931] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.compute_link_prefix = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080117] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080293] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.dhcp_domain = novalocal {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.080462] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.enable_instance_password = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080622] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.glance_link_prefix = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080786] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.080955] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081132] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.instance_list_per_project_cells = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081295] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.list_records_by_skipping_down_cells = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081454] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.local_metadata_per_cell = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081619] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.max_limit = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081786] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.metadata_cache_expiration = 15 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.081957] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.neutron_default_tenant_id = default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082140] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.response_validation = warn {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082305] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.use_neutron_default_nets = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082472] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082631] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082796] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.082963] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083145] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_dynamic_targets = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083306] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_jsonfile_path = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083482] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083668] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.backend = dogpile.cache.memcached {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083832] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.backend_argument = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.083998] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.config_prefix = cache.oslo {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084177] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.dead_timeout = 60.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084336] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.debug_cache_backend = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084497] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.enable_retry_client = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084656] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.enable_socket_keepalive = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084823] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.enabled = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.084985] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.enforce_fips_mode = False {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085161] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.expiration_time = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085323] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.hashclient_retry_attempts = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085485] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085666] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_dead_retry = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.085833] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_password = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086009] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086172] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086333] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_pool_maxsize = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086489] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086646] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_sasl_enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.086891] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087099] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087267] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.memcache_username = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087434] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.proxies = [] {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087600] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_db = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087793] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_password = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.087975] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088169] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088343] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_server = localhost:6379 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088508] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_socket_timeout = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088666] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.redis_username = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088828] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.retry_attempts = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.088993] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.retry_delay = 0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089169] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.socket_keepalive_count = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089359] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.socket_keepalive_idle = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089496] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.socket_keepalive_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089643] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.tls_allowed_ciphers = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089796] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.tls_cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.089950] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.tls_certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090123] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.tls_enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090279] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cache.tls_keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090447] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090615] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.auth_type = password {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090787] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.090960] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091129] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091289] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091445] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.cross_az_attach = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091601] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.debug = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091754] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.endpoint_template = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.091916] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.http_retries = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092085] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092240] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.keyfile = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092406] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.os_region_name = RegionOne {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092565] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092717] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cinder.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.092887] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093051] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.cpu_dedicated_set = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093213] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.cpu_shared_set = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093373] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.image_type_exclude_list = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093533] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093692] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.093853] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094016] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094184] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094348] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.resource_provider_association_refresh = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094507] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094666] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.shutdown_retry_interval = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.094843] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095031] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] conductor.workers = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095212] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] console.allowed_origins = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095372] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] console.ssl_ciphers = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095538] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] console.ssl_minimum_version = default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095742] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] consoleauth.enforce_session_timeout = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.095910] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] consoleauth.token_ttl = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096095] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096256] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096414] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096568] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096723] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.096877] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.endpoint_override = None 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097134] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097307] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097465] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097618] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097809] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.097976] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098145] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098312] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.service_type = accelerator {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098469] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098622] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098818] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.098986] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099179] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099339] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] cyborg.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.099516] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.backend = sqlalchemy {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099680] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.connection = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.099848] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.connection_debug = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100019] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.connection_parameters = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100189] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.connection_recycle_time = 3600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100351] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.connection_trace = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100512] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.db_inc_retry_interval = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100676] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.db_max_retries = 20 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100838] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.db_max_retry_interval = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.100999] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.db_retry_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101175] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.max_overflow = 50 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101335] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.max_pool_size = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101493] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.max_retries = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101657] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101815] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.mysql_wsrep_sync_wait = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.101973] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.pool_timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102146] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.retry_interval = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102301] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.slave_connection = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102459] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.sqlite_synchronous = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102617] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] database.use_db_reconnect = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102789] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.backend = sqlalchemy {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.102987] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.connection = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103129] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.connection_debug = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103296] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.connection_parameters = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103456] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.connection_recycle_time = 3600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103616] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.connection_trace = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103773] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.db_inc_retry_interval = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.103938] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.db_max_retries = 20 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104713] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.db_max_retry_interval = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104713] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.db_retry_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104713] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.max_overflow = 50 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104713] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.max_pool_size = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104713] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.max_retries = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.104903] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105041] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105206] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.pool_timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105371] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.retry_interval = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105535] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.slave_connection = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105725] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] api_database.sqlite_synchronous = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.105907] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] devices.enabled_mdev_types = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106095] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106269] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106430] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ephemeral_storage_encryption.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106592] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106770] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.api_servers = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.106934] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107183] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107371] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107534] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107725] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.107884] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.debug = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108068] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.default_trusted_certificate_ids = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108237] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.enable_certificate_validation = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108403] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.enable_rbd_download = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108566] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108729] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.108893] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.keyfile = None 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109064] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109226] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109391] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.num_retries = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109557] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.rbd_ceph_conf = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109722] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.rbd_connect_timeout = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.109896] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.rbd_pool = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110069] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.rbd_user = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110230] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110393] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110550] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110717] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.service_type = image {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.110886] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111056] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111218] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111375] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111553] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111717] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.verify_glance_signatures = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.111877] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] glance.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112054] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] guestfs.debug = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112224] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112389] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112547] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112708] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.112873] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113046] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113211] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113367] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113529] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113686] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.keyfile = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.113845] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114008] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114173] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114334] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114492] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114661] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.service_type = shared-file-system {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114829] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.share_apply_policy_timeout = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.114992] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115165] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115324] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115481] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115689] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.115860] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] manila.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116043] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] mks.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116409] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116600] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.manager_interval = 2400 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116769] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.precache_concurrency = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.116941] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.remove_unused_base_images = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117189] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117392] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117573] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] image_cache.subdirectory_name = _base {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117779] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.api_max_retries = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.117956] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.api_retry_interval = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118131] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118296] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118454] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118608] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118770] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.118933] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.conductor_group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119104] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119263] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119421] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119582] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119741] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.119900] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120067] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120239] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.peer_list = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120397] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120556] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120720] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.serial_console_state_timeout = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.120878] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121055] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.service_type = baremetal {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121218] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.shard = None {{(pid=61898) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121382] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121541] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121699] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.121859] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122049] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122212] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ironic.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122391] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122560] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] key_manager.fixed_key = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122738] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.122898] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.barbican_api_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123067] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.barbican_endpoint = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123242] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.barbican_endpoint_type = public {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123400] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.barbican_region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123555] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.cafile = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123712] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.123882] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124048] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124207] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124369] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.number_of_retries = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124531] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.retry_delay = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124691] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.send_service_user_token = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.124856] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125024] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125209] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.verify_ssl = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125344] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican.verify_ssl_path = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125510] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125696] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.125864] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126031] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126199] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126357] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126513] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126672] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126830] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] barbican_service_user.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.126995] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.approle_role_id = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127168] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.approle_secret_id = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127426] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.kv_mountpoint = secret {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127599] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.kv_path = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127803] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.kv_version = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.127970] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.namespace = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128149] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.root_token_id = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128307] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.ssl_ca_crt_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128476] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.timeout = 60.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128639] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.use_ssl = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.128829] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129039] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129211] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129373] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129532] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129698] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.129860] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130029] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130192] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130355] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130513] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130669] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130827] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.min_version = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.130985] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131157] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131313] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131484] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.service_type = identity {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131650] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131810] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.131971] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132146] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132328] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132488] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] keystone.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132686] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.connection_uri = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.132847] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_mode = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133019] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133191] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_models = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.133358] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_power_governor_high = performance {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133521] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133682] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_power_management = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.133849] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134069] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.device_detach_attempts = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134185] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.device_detach_timeout = 20 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134348] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.disk_cachemodes = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134504] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.disk_prefix = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134677] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.enabled_perf_events = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.134842] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.file_backed_memory = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135045] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.gid_maps = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135242] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.hw_disk_discard = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135408] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.hw_machine_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135581] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_rbd_ceph_conf = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135761] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.135925] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136107] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_rbd_glance_store_name = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136279] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_rbd_pool = rbd {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136445] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_type = default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136603] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.images_volume_group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136763] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.inject_key = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.136927] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.inject_partition = -2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137097] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.inject_password = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137260] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.iscsi_iface = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137510] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.iser_use_multipath = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137712] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.137885] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138066] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_downtime = 500 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138233] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138393] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138554] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_inbound_addr = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138717] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.138914] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139100] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_scheme = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139278] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_timeout_action = abort {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139444] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_tunnelled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139602] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_uri = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139765] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.live_migration_with_native_tls = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.139925] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.max_queues = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140101] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140341] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140504] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.nfs_mount_options = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140808] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.140984] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141167] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141331] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141495] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141657] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_pcie_ports = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141822] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.141989] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.pmem_namespaces = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142160] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.quobyte_client_cfg = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142446] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142618] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142784] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.142948] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143117] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rbd_secret_uuid = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.143275] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rbd_user = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143436] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143605] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143763] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rescue_image_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.143921] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rescue_kernel_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144089] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rescue_ramdisk_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144259] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144412] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.rx_queue_size = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144576] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.smbfs_mount_options = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.144848] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145032] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.snapshot_compression = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145196] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.snapshot_image_format = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145411] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145577] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.sparse_logical_volumes = False {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145745] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.swtpm_enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.145907] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.swtpm_group = tss {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146081] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.swtpm_user = tss {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146250] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.sysinfo_serial = unique {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146408] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.tb_cache_size = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146563] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.tx_queue_size = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146725] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.uid_maps = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.146888] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.use_virtio_for_bridges = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147064] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.virt_type = kvm {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147232] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.volume_clear = zero {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147394] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.volume_clear_size = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147653] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.volume_use_multipath = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.147849] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_cache_path = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148037] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148210] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148375] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148539] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.148830] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149042] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.vzstorage_mount_user = stack {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149218] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149393] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149566] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.auth_type = password {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149727] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.149888] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150066] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150227] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150385] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150554] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.default_floating_pool = public {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150711] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.150875] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.extension_sync_interval = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151045] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.http_retries = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151211] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151368] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151525] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151694] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.151877] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152026] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.ovs_bridge = br-int {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152198] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.physnets = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152367] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.region_name = RegionOne {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152530] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152699] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.service_metadata_proxy = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.152862] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153040] env[61898]: 
DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.service_type = network {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153208] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153368] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153525] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153686] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.153868] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154046] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] neutron.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154224] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] notifications.bdms_in_notifications = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154399] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] notifications.default_level = INFO {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154572] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] notifications.notification_format = unversioned {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154735] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] notifications.notify_on_state_change = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.154911] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155096] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] pci.alias = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155266] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] pci.device_spec = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155429] env[61898]: DEBUG 
oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] pci.report_in_placement = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155601] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155773] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.auth_type = password {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.155945] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156115] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156275] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156437] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156593] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156749] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.156911] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.default_domain_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157078] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.default_domain_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157239] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.domain_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157394] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.domain_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157550] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.157848] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None 
None] placement.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158027] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158194] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158353] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158522] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.password = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158708] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.project_domain_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.158902] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.project_domain_name = Default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159094] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.project_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159274] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.project_name = service {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159443] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.region_name = RegionOne {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159605] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159764] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.159935] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.service_type = placement {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160112] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160273] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.status_code_retries = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160432] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160588] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.system_scope = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160747] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.160905] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.trust_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161073] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.user_domain_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161243] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.user_domain_name = Default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161401] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.user_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161571] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.username = nova {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161750] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.161910] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] placement.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162099] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.cores = 20 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162265] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.count_usage_from_placement = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162436] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162612] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.injected_file_content_bytes = 10240 {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.162781] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.injected_file_path_length = 255 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163011] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.injected_files = 5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163218] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.instances = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163389] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.key_pairs = 100 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163558] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.metadata_items = 128 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.163863] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.ram = 51200 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164069] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.recheck_quota = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164249] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.server_group_members = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164460] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] quota.server_groups = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164589] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164755] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.164919] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.image_metadata_prefilter = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165092] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165263] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.max_attempts = 3 {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165429] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.max_placement_results = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165606] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165794] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.165961] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166150] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] scheduler.workers = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166328] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166499] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166679] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.166850] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167023] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167196] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167372] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167562] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.167761] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.host_subset_size = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168034] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168213] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168379] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168546] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.isolated_hosts = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168747] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.isolated_images = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.168948] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169137] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169305] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169468] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.pci_in_placement = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169630] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169790] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.169956] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170161] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170305] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170465] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170623] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.track_instance_changes = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170799] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.170970] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metrics.required = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171145] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metrics.weight_multiplier = 1.0 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171308] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171470] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] metrics.weight_setting = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171786] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.171962] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172152] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.port_range = 10000:20000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172322] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172486] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172654] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] serial_console.serialproxy_port = 6083 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.172822] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173010] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.auth_type = password {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173171] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173331] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173494] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173655] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.insecure = False {{(pid=61898) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173814] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.173983] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.send_service_user_token = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174160] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174317] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] service_user.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174482] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.agent_enabled = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174642] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.174939] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175144] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175316] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.html5proxy_port = 6082 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175476] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.image_compression = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175653] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.jpeg_compression = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175825] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.playback_compression = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.175987] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.require_secure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176169] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.server_listen = 127.0.0.1 {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176335] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176493] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.streaming_mode = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176647] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] spice.zlib_compression = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176810] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] upgrade_levels.baseapi = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.176977] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] upgrade_levels.compute = auto {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177148] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] upgrade_levels.conductor = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177304] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] upgrade_levels.scheduler = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177469] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177629] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.177821] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178081] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178272] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178437] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178593] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178785] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.178968] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vendordata_dynamic_auth.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179168] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.api_retry_count = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179326] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.ca_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179495] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179659] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.cluster_name = testcl1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179822] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.connection_pool_size = 10 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.179981] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.console_delay_seconds = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.180160] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.datastore_regex = ^datastore.* {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.180364] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.180533] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.host_password = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.180701] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.host_port = 443 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.180874] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.host_username = administrator@vsphere.local {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181046] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.insecure = True {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181210] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.integration_bridge = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181374] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.maximum_objects = 100 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181533] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.pbm_default_policy = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181693] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.pbm_enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.181852] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.pbm_wsdl_location = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182028] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182192] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.serial_port_proxy_uri = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182347] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.serial_port_service_uri = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182511] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.task_poll_interval = 0.5 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182679] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.use_linked_clone = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.182847] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.vnc_keymap = en-us {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.183014] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.vnc_port = 5900 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.183183] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vmware.vnc_port_total = 10000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.183369] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.auth_schemes = ['none'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.183543] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.183828] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184022] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184199] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.novncproxy_port = 6080 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184374] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.server_listen = 127.0.0.1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184547] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184703] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.vencrypt_ca_certs = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.184862] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.vencrypt_client_cert = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185030] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vnc.vencrypt_client_key = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185216] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185380] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_deep_image_inspection = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185540] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185727] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.185896] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186069] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.disable_rootwrap = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186233] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.enable_numa_live_migration = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186391] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186550] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186708] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.186869] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.libvirt_disable_apic = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187031] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187196] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187356] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187515] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187676] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.187868] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.188110] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.188301] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.188466] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.188634] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.188863] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189065] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.client_socket_timeout = 900 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189241] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.default_pool_size = 1000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189407] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.keep_alive = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189575] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.max_header_line = 16384 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189739] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.189902] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.ssl_ca_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190070] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.ssl_cert_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190233] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.ssl_key_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190397] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.tcp_keepidle = 600 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190574] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190738] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] zvm.ca_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.190899] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] zvm.cloud_connector_url = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.191197] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.191374] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] zvm.reachable_timeout = 300 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.191555] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.enforce_new_defaults = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.191940] env[61898]: WARNING oslo_config.cfg [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
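Every config entry in the dump above is produced by a single call to oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2826 frame repeated on each line), which walks all registered option groups at service startup and logs one DEBUG line per option, masking any option registered with secret=True (hence vmware.host_password and the notification transport_url showing as ****); the deprecation WARNING just above is likewise emitted by oslo.config for options registered as deprecated_for_removal. A minimal, self-contained sketch of that dump mechanism, assuming oslo.config is installed and using a small illustrative subset of the [vmware] options, with names and values taken from the log and option types chosen here only for illustration:

    import logging

    from oslo_config import cfg

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('oslo_service.service')

    # Register a few of the [vmware] options seen in the dump above.
    # Names/values come from the log; the option types are assumptions.
    CONF = cfg.ConfigOpts()
    CONF.register_opts(
        [cfg.StrOpt('host_ip'),
         cfg.StrOpt('host_password', secret=True),  # rendered as **** by log_opt_values
         cfg.StrOpt('cluster_name'),
         cfg.BoolOpt('insecure', default=False),
         cfg.FloatOpt('task_poll_interval', default=0.5)],
        group='vmware')

    CONF(args=[])  # parse with no CLI args and no config files
    CONF.set_override('host_ip', 'vc1.osci.c.eu-de-1.cloud.sap', group='vmware')
    CONF.set_override('host_password', 'not-the-real-password', group='vmware')
    CONF.set_override('cluster_name', 'testcl1', group='vmware')
    CONF.set_override('insecure', True, group='vmware')

    # Walks every registered group/option and emits one DEBUG line per option,
    # in the same shape as the startup dump above.
    CONF.log_opt_values(LOG, logging.DEBUG)

Run directly, this should print DEBUG lines such as "vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap" and "vmware.host_password = ****", mirroring the per-option format seen throughout this startup dump.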
[ 456.192136] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.enforce_scope = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.192313] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.policy_default_rule = default {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.192496] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.192669] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.policy_file = policy.yaml {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.192838] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.192999] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.193176] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.193334] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.193495] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.193661] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.193836] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194019] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.connection_string = messaging:// {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194189] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.enabled = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194361] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.es_doc_type = notification 
{{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194527] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.es_scroll_size = 10000 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194693] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.es_scroll_time = 2m {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.194907] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.filter_error_trace = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.195113] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.hmac_keys = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.195287] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.sentinel_service_name = mymaster {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.195456] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.socket_timeout = 0.1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.195639] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.trace_requests = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.195817] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler.trace_sqlalchemy = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196014] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler_jaeger.process_tags = {} {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196181] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler_jaeger.service_name_prefix = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196345] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] profiler_otlp.service_name_prefix = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196510] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] remote_debug.host = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196691] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] remote_debug.port = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.196892] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.197083] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.197256] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.197422] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.197616] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.197805] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.198040] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.198313] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.198497] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.198672] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.198861] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199065] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199243] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199416] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199589] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199760] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.199930] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200120] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200289] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200452] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200656] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200829] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.200997] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.201182] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.201346] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.201509] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.201671] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.201831] env[61898]: DEBUG oslo_service.service [None 
req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202010] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202185] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202358] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202528] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202692] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.202864] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203043] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203214] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203403] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203571] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_notifications.retry = -1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203756] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.203932] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204117] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.auth_section = None {{(pid=61898) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204282] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.auth_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204439] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.cafile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204593] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.certfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204757] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.collect_timing = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.204913] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.connect_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205083] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.connect_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205245] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.endpoint_id = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205403] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.endpoint_override = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205565] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.insecure = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205749] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.keyfile = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.205912] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.max_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206081] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.min_version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206241] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.region_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206401] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.retriable_status_codes = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206556] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.service_name = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206711] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.service_type = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.206873] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.split_loggers = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207038] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.status_code_retries = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207202] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.status_code_retry_delay = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207358] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.timeout = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207513] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.valid_interfaces = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207666] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_limit.version = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.207867] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_reports.file_event_handler = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.208050] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.208214] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] oslo_reports.log_dir = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.208479] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.208647] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.208827] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209023] 
env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209192] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209352] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209522] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209680] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.209839] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210016] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210182] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210339] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] vif_plug_ovs_privileged.user = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210546] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210685] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.210859] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211054] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211239] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211408] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211575] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211738] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.211917] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212114] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.isolate_vif = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212304] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212474] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212643] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212814] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.212977] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] os_vif_ovs.per_port_bridge = False {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213165] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] privsep_osbrick.capabilities = [21] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213325] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] privsep_osbrick.group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213483] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] privsep_osbrick.helper_command = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213645] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213810] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.213990] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] privsep_osbrick.user = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.214217] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.214383] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.group = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.214542] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.helper_command = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.214706] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.214871] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.215034] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] nova_sys_admin.user = None {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.215168] env[61898]: DEBUG oslo_service.service [None req-62ff2279-aee0-46fc-9400-4c2ed1a2bc49 None None] ******************************************************************************** {{(pid=61898) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 456.215676] env[61898]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 456.719607] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Getting list of instances from cluster (obj){ [ 456.719607] env[61898]: value = "domain-c8" [ 456.719607] env[61898]: _type = "ClusterComputeResource" [ 456.719607] env[61898]: } {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 456.720886] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fb1774-e4bf-4ff9-9fbb-691d2c4fbb94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 456.730099] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Got total of 0 instances {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 456.730624] env[61898]: WARNING nova.virt.vmwareapi.driver [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 456.731101] env[61898]: INFO nova.virt.node [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Generated node identity 79886f75-94e9-4bf0-9cbd-87f3715d3144 [ 456.731341] env[61898]: INFO nova.virt.node [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Wrote node identity 79886f75-94e9-4bf0-9cbd-87f3715d3144 to /opt/stack/data/n-cpu-1/compute_id [ 457.234255] env[61898]: WARNING nova.compute.manager [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Compute nodes ['79886f75-94e9-4bf0-9cbd-87f3715d3144'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 458.240100] env[61898]: INFO nova.compute.manager [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 459.245733] env[61898]: WARNING nova.compute.manager [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 459.246132] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.246256] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 459.246409] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 459.246565] env[61898]: DEBUG nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 459.247503] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771f5d58-6223-4d58-880a-c8ac67e9f47a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.256064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7584f254-09b7-4539-b568-ff4959ce97a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.269422] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-68f69a49-412c-4a93-b68b-7c4da4f918c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.275698] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe2c5ae-9fc1-4389-a945-020774adb937 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.304424] env[61898]: DEBUG nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181481MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 459.304573] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.304756] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 459.807547] env[61898]: WARNING nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] No compute node record for cpu-1:79886f75-94e9-4bf0-9cbd-87f3715d3144: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 79886f75-94e9-4bf0-9cbd-87f3715d3144 could not be found. [ 460.311823] env[61898]: INFO nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 79886f75-94e9-4bf0-9cbd-87f3715d3144 [ 461.820361] env[61898]: DEBUG nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 461.820722] env[61898]: DEBUG nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 461.968046] env[61898]: INFO nova.scheduler.client.report [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] [req-ddf20a25-451f-482a-8416-e3d8f93c91e4] Created resource provider record via placement API for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 461.984614] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b4921a-5e16-41db-b587-25e0998e5f94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 461.992083] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88572859-cfd1-46a2-b26d-e558523e3d7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.022122] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f72caf-c115-4c39-b0c2-edd140d6d591 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.028829] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e96363-dd04-463a-b0cc-3d1d10bbb8eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.041375] env[61898]: DEBUG nova.compute.provider_tree [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 462.579408] env[61898]: DEBUG nova.scheduler.client.report [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 462.579745] env[61898]: DEBUG nova.compute.provider_tree [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 0 to 1 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 462.579931] env[61898]: DEBUG nova.compute.provider_tree [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 462.628656] env[61898]: DEBUG nova.compute.provider_tree [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Updating 
resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 1 to 2 during operation: update_traits {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 463.137067] env[61898]: DEBUG nova.compute.resource_tracker [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 463.137436] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.832s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 463.137478] env[61898]: DEBUG nova.service [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Creating RPC server for service compute {{(pid=61898) start /opt/stack/nova/nova/service.py:186}} [ 463.151241] env[61898]: DEBUG nova.service [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] Join ServiceGroup membership for this service compute {{(pid=61898) start /opt/stack/nova/nova/service.py:203}} [ 463.151424] env[61898]: DEBUG nova.servicegroup.drivers.db [None req-0e64d1a3-5ff9-4767-bee5-3a5f3aaac033 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61898) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 500.192038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 500.192330] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 500.211157] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "40fd8af8-586c-4292-9acf-fe211337b69a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 500.212671] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "40fd8af8-586c-4292-9acf-fe211337b69a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 500.698699] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 
tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 500.714495] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 500.878782] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquiring lock "63f32d41-18e3-4918-981d-10e8f22423b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 500.879029] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "63f32d41-18e3-4918-981d-10e8f22423b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.260866] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.262359] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.265503] env[61898]: INFO nova.compute.claims [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 501.271606] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.386622] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 501.822356] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "052ff2f0-770a-4511-ae0c-e351ad987904" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 501.824093] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "052ff2f0-770a-4511-ae0c-e351ad987904" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 501.919777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.328022] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 502.364210] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe23850-0a8f-4f53-bd71-6b0e8173f205 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 502.372472] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1abb6e-c75a-4bf8-a098-10759970100c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 502.407815] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdeef3bb-3e03-47ee-9ed2-c643697b3eaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 502.420901] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c2d520-adca-4cef-819e-ca81d044b93f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 502.436165] env[61898]: DEBUG nova.compute.provider_tree [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 502.848448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 502.942125] env[61898]: DEBUG nova.scheduler.client.report [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 503.230118] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.230288] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 503.451517] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 503.451517] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 503.454124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.181s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 503.455657] env[61898]: INFO nova.compute.claims [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 503.736895] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 503.955100] env[61898]: DEBUG nova.compute.utils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 503.956208] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 504.278779] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.459609] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 504.563018] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a87e21d-6278-45da-a1d7-8468288e6519 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.570496] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d856a7a9-e451-421a-b2be-571ab09309a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.604545] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b10cb0-f265-49bf-bfce-b92b64a5f53a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.612315] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a89a1b-87ab-4561-a617-cf553b3e1352 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 504.628881] env[61898]: DEBUG nova.compute.provider_tree [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 505.132278] env[61898]: DEBUG nova.scheduler.client.report [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 505.475844] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 505.638772] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.186s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 505.640335] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 505.646604] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.724s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.646604] env[61898]: INFO nova.compute.claims [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 506.024085] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 506.024364] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 506.024517] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 506.024703] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 506.024848] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 506.024998] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
506.026550] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 506.026550] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 506.026550] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 506.026550] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 506.026970] env[61898]: DEBUG nova.virt.hardware [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 506.028330] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0556645e-3024-4220-9721-13a67e8c6084 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.037495] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c390ae9d-06d1-48ca-987e-e4341c2e5eea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.056993] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16036c2-7468-408b-804e-361e8b2201aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.075636] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 506.085816] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 506.086121] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee30c034-9835-475e-a790-fee9999a993c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.101558] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Created folder: OpenStack in parent group-v4. [ 506.101815] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating folder: Project (d2b2ab14c50d4b379a0b5dcfba503b8c). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 506.102042] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1e10421-d094-4934-a8f8-8182548aeb5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.112969] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Created folder: Project (d2b2ab14c50d4b379a0b5dcfba503b8c) in parent group-v267550. [ 506.114123] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating folder: Instances. Parent ref: group-v267551. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 506.114353] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9c46f2d-24a5-4538-a1a6-c627f47f7ebc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.125810] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Created folder: Instances in parent group-v267551. [ 506.126189] env[61898]: DEBUG oslo.service.loopingcall [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 506.126318] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 506.126636] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c67009e-d0d2-4fde-9402-0cdfe04488d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.146650] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 506.146650] env[61898]: value = "task-1240323" [ 506.146650] env[61898]: _type = "Task" [ 506.146650] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 506.159090] env[61898]: DEBUG nova.compute.utils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 506.161085] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240323, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 506.161818] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 506.161932] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 506.260085] env[61898]: DEBUG nova.policy [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fba3c68fac24de18fbebaadfa0b93a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46fbee4035b847ef8b8150edf2bd6aeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 506.663211] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240323, 'name': CreateVM_Task, 'duration_secs': 0.299824} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 506.663211] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 506.663548] env[61898]: DEBUG oslo_vmware.service [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6801c251-ec8f-42a1-acee-f4bee43274f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.669777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 506.669915] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 506.673811] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 506.673811] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 506.673811] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b66277e1-7f68-4b88-82e4-17ccfe3b6e90 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.687423] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 506.687423] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a220aa-acd4-3f7b-122f-460a5e788d0d" [ 506.687423] env[61898]: _type = "Task" [ 506.687423] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 506.695328] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a220aa-acd4-3f7b-122f-460a5e788d0d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 506.737885] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Successfully created port: 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 506.788581] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac0d0d6-a74d-4785-af02-154adce6deac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.796802] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb6bf03-44ba-462a-a840-7382bf873ee9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.830985] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8030c2fa-bc7d-4c5a-b7d7-172658e4e859 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.847449] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7a4c74-400f-47f2-9763-a8ee17003d7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 506.863861] env[61898]: DEBUG nova.compute.provider_tree [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 507.199967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 507.201701] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 507.201951] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 507.202157] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 507.203235] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 507.203601] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd6fd239-5e4b-4a76-9337-c66ffa1e3a48 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.217187] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 507.217187] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 507.217468] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea5aa14-45cb-473c-acaf-c5ad82baaaa0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.227444] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b3a8aa2-6370-4021-bb05-bcd8d52174cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.232894] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 507.232894] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a66b4f-1612-2b9c-2cdd-a633ade5474b" [ 507.232894] env[61898]: _type = "Task" [ 507.232894] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 507.243764] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a66b4f-1612-2b9c-2cdd-a633ade5474b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 507.371231] env[61898]: DEBUG nova.scheduler.client.report [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 507.685671] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 507.715569] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 507.716218] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 507.717042] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 507.717698] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 507.717698] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 507.717698] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 507.720590] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 507.720590] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 507.720590] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 507.720590] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 507.723016] env[61898]: DEBUG nova.virt.hardware [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 507.723016] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175146cb-3b9a-459f-9dac-d96079c08ce1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.733190] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc06dad-83c6-4042-99cb-567ef8d0019e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.768663] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 507.769260] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating directory with path [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
507.769531] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7c7079b-287e-4c11-8f58-d847fd704f17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.813312] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Created directory with path [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 507.813312] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Fetch image to [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 507.813312] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Downloading image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61898) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 507.814526] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae1ef19-02a5-4d1b-8625-43aac9b7ed94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.825238] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264dca8a-c5ab-4950-a9cf-9a5cefeb1e40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.835991] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a86626-a381-47c8-9456-0b31054195c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.872759] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7891fc7c-3510-4e9c-a771-228704fa1644 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.877204] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 507.878849] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 507.884956] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.035s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.884956] env[61898]: INFO nova.compute.claims [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 507.887973] env[61898]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-55beb071-e7bf-4a8a-8d13-bc4618b1a4ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.918405] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Downloading image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to the data store datastore2 {{(pid=61898) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 507.994451] env[61898]: DEBUG oslo_vmware.rw_handles [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 508.390433] env[61898]: DEBUG nova.compute.utils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 508.391975] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 508.392613] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 508.490936] env[61898]: DEBUG nova.policy [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0bac6d1e06a046db8bd4722aabb4b483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20504ba74b1c40e784dc10e7f105763b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 508.781750] env[61898]: DEBUG oslo_vmware.rw_handles [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 508.781990] env[61898]: DEBUG oslo_vmware.rw_handles [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 508.840676] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Downloaded image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk on the data store datastore2 {{(pid=61898) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 508.842727] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 508.843851] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Copying Virtual Disk [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk to [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 508.843851] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1a9675d-025a-45aa-b787-1fe6e0a0cdab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.853778] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 508.853778] env[61898]: value = "task-1240324" [ 508.853778] env[61898]: _type = "Task" [ 508.853778] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 508.863864] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 508.904915] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 509.062957] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a4400f-e0c2-432a-aed6-06f46b95bc9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.073946] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a517ce-fc98-410b-84c3-f38b76800a43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.110653] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72ed372-0295-4c5b-abb5-f2d163dbdedf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.118795] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0d84b4-23f7-482d-b924-6206595aa2a8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.142430] env[61898]: DEBUG nova.compute.provider_tree [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 509.365691] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240324, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.476188] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Successfully created port: 3e565297-dbb9-4d50-97af-752995c852a2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 509.646643] env[61898]: DEBUG nova.scheduler.client.report [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 509.868761] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667178} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 509.869137] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Copied Virtual Disk [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk to [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 509.869417] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleting the datastore file [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 509.869626] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0407ec4a-9848-4496-9987-7a8d3dd727d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.876798] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 509.876798] env[61898]: value = "task-1240325" [ 509.876798] env[61898]: _type = "Task" [ 509.876798] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 509.884944] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240325, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 509.924524] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 509.929624] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquiring lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.930038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.958221] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 509.958380] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 509.958486] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 509.958673] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 509.958816] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 509.958962] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 
tempest-InstanceActionsV221TestJSON-2057405247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 509.959246] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 509.959398] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 509.959614] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 509.959742] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 509.959898] env[61898]: DEBUG nova.virt.hardware [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 509.961034] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1627ba-c33c-4b41-8ea5-61d39a4bf7ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.969416] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7079a4bf-e1b9-4a1e-9502-87156736aa67 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.158293] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 510.158293] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 510.165408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.883s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.166750] env[61898]: INFO nova.compute.claims [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 510.284431] env[61898]: ERROR nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. [ 510.284431] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 510.284431] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 510.284431] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 510.284431] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 510.284431] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 510.284431] env[61898]: ERROR nova.compute.manager raise self.value [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 510.284431] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 510.284431] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 510.284431] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 510.284851] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 510.284851] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 510.284851] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. 
[ 510.284851] env[61898]: ERROR nova.compute.manager [ 510.284851] env[61898]: Traceback (most recent call last): [ 510.284851] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 510.284851] env[61898]: listener.cb(fileno) [ 510.284851] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 510.284851] env[61898]: result = function(*args, **kwargs) [ 510.284851] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 510.284851] env[61898]: return func(*args, **kwargs) [ 510.284851] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 510.284851] env[61898]: raise e [ 510.284851] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 510.284851] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 510.284851] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 510.284851] env[61898]: created_port_ids = self._update_ports_for_instance( [ 510.284851] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 510.284851] env[61898]: with excutils.save_and_reraise_exception(): [ 510.284851] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 510.284851] env[61898]: self.force_reraise() [ 510.284851] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 510.284851] env[61898]: raise self.value [ 510.284851] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 510.284851] env[61898]: updated_port = self._update_port( [ 510.284851] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 510.284851] env[61898]: _ensure_no_port_binding_failure(port) [ 510.284851] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 510.284851] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 510.285549] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. [ 510.285549] env[61898]: Removing descriptor: 15 [ 510.285549] env[61898]: ERROR nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. 
[ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Traceback (most recent call last): [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] yield resources [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.driver.spawn(context, instance, image_meta, [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 510.285549] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] vm_ref = self.build_virtual_machine(instance, [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] vif_infos = vmwarevif.get_vif_info(self._session, [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] for vif in network_info: [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self._sync_wrapper(fn, *args, **kwargs) [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.wait() [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self[:] = self._gt.wait() [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self._exit_event.wait() [ 510.285834] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 510.286148] env[61898]: ERROR 
nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] result = hub.switch() [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self.greenlet.switch() [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] result = function(*args, **kwargs) [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return func(*args, **kwargs) [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise e [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] nwinfo = self.network_api.allocate_for_instance( [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 510.286148] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] created_port_ids = self._update_ports_for_instance( [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] with excutils.save_and_reraise_exception(): [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.force_reraise() [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise self.value [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] updated_port = self._update_port( [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 510.286445] 
env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] _ensure_no_port_binding_failure(port) [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 510.286445] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise exception.PortBindingFailed(port_id=port['id']) [ 510.286813] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. [ 510.286813] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] [ 510.286813] env[61898]: INFO nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Terminating instance [ 510.390412] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024133} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 510.390793] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 510.391120] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Moving file from [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c/e07a6c11-ab12-4187-81fc-1a28a9d1e65d to [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d. {{(pid=61898) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 510.391457] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-7e8e0855-492d-4c2e-a3b4-65682b3425bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.398639] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 510.398639] env[61898]: value = "task-1240326" [ 510.398639] env[61898]: _type = "Task" [ 510.398639] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 510.411396] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240326, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 510.432060] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 510.664628] env[61898]: DEBUG nova.compute.utils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 510.667088] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 510.667336] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 510.791424] env[61898]: DEBUG nova.policy [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57307029ad249cfa2f9f1fb4b65bd98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3355cf485445ca934f36e02fe191f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 510.794184] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 510.794184] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquired lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 510.794184] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 510.913768] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 
tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240326, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024357} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 510.914243] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] File moved {{(pid=61898) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 510.914680] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Cleaning up location [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 510.914935] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleting the datastore file [datastore2] vmware_temp/9e87fbc0-fe77-476f-a946-64becc439d9c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 510.915297] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3a2b253-d345-44e0-a1e0-5cabb40c7645 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.921573] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 510.921573] env[61898]: value = "task-1240327" [ 510.921573] env[61898]: _type = "Task" [ 510.921573] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 510.932424] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 510.963965] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 511.171939] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 511.324488] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b54b01-a189-4115-9ab1-fdfb3a1b65d6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.333802] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6960098d-58e3-4fa3-a609-583018b7377c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.380399] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 511.383504] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd27523e-4b1e-4e81-aebd-a36da4b2d62c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.392203] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0718ac62-2eef-4e1a-91c5-ecd0787d4b99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.406980] env[61898]: DEBUG nova.compute.provider_tree [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 511.436330] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024302} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 511.436594] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 511.437391] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ce0a5e-f9db-45e9-bf17-622fd96e3e87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.443538] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 511.443538] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5289d-0b1f-c265-4b14-56342078dfa0" [ 511.443538] env[61898]: _type = "Task" [ 511.443538] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 511.454279] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5289d-0b1f-c265-4b14-56342078dfa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 511.480326] env[61898]: ERROR nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. [ 511.480326] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 511.480326] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 511.480326] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 511.480326] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 511.480326] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 511.480326] env[61898]: ERROR nova.compute.manager raise self.value [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 511.480326] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 511.480326] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 511.480326] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 511.480755] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 511.480755] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 511.480755] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. 
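The traceback above ends in `_ensure_no_port_binding_failure(port)` at nova/network/neutron.py:294 raising `nova.exception.PortBindingFailed` for port 3e565297-dbb9-4d50-97af-752995c852a2: Neutron accepted the port update but reported that it could not bind the port on this host, so Nova aborts the network allocation. A minimal standalone sketch of that check is below; it assumes (as in upstream Nova, not shown in this log) that a failed binding is signalled by the port's `binding:vif_type` field being set to `'binding_failed'`.

```python
# Minimal standalone sketch of the check implied by the traceback above.
# Assumption (not shown in this log): Neutron marks a failed binding by
# setting the port's 'binding:vif_type' attribute to 'binding_failed'.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    """Raise PortBindingFailed if Neutron reports a failed binding."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # A port that Neutron failed to bind, as in the traceback above.
    port = {'id': '3e565297-dbb9-4d50-97af-752995c852a2',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)
```

In the log, this exception propagates out of `_allocate_network_async`, fails the spawn in `_build_resources`, and leads to the "Terminating instance" INFO line for instance 63f32d41-18e3-4918-981d-10e8f22423b8 further down.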
[ 511.480755] env[61898]: ERROR nova.compute.manager [ 511.480755] env[61898]: Traceback (most recent call last): [ 511.480755] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 511.480755] env[61898]: listener.cb(fileno) [ 511.480755] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 511.480755] env[61898]: result = function(*args, **kwargs) [ 511.480755] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 511.480755] env[61898]: return func(*args, **kwargs) [ 511.480755] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 511.480755] env[61898]: raise e [ 511.480755] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 511.480755] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 511.480755] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 511.480755] env[61898]: created_port_ids = self._update_ports_for_instance( [ 511.480755] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 511.480755] env[61898]: with excutils.save_and_reraise_exception(): [ 511.480755] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 511.480755] env[61898]: self.force_reraise() [ 511.480755] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 511.480755] env[61898]: raise self.value [ 511.480755] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 511.480755] env[61898]: updated_port = self._update_port( [ 511.480755] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 511.480755] env[61898]: _ensure_no_port_binding_failure(port) [ 511.480755] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 511.480755] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 511.481486] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. [ 511.481486] env[61898]: Removing descriptor: 20 [ 511.481486] env[61898]: ERROR nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. 
[ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Traceback (most recent call last): [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] yield resources [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.driver.spawn(context, instance, image_meta, [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 511.481486] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] vm_ref = self.build_virtual_machine(instance, [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] for vif in network_info: [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self._sync_wrapper(fn, *args, **kwargs) [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.wait() [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self[:] = self._gt.wait() [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self._exit_event.wait() [ 511.481789] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 511.482145] env[61898]: ERROR 
nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] result = hub.switch() [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self.greenlet.switch() [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] result = function(*args, **kwargs) [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return func(*args, **kwargs) [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise e [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] nwinfo = self.network_api.allocate_for_instance( [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 511.482145] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] created_port_ids = self._update_ports_for_instance( [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] with excutils.save_and_reraise_exception(): [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.force_reraise() [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise self.value [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] updated_port = self._update_port( [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 511.482506] 
env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] _ensure_no_port_binding_failure(port) [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 511.482506] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise exception.PortBindingFailed(port_id=port['id']) [ 511.482815] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. [ 511.482815] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] [ 511.482815] env[61898]: INFO nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Terminating instance [ 511.542155] env[61898]: DEBUG nova.compute.manager [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Received event network-changed-1172a41c-e8d2-41a4-95d9-b361dfa1fe4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 511.542359] env[61898]: DEBUG nova.compute.manager [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Refreshing instance network info cache due to event network-changed-1172a41c-e8d2-41a4-95d9-b361dfa1fe4a. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 511.542545] env[61898]: DEBUG oslo_concurrency.lockutils [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] Acquiring lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 511.562644] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 511.671232] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Successfully created port: 7e967d97-acf7-4035-8404-668980138d1a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 511.914335] env[61898]: DEBUG nova.scheduler.client.report [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 511.957591] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5289d-0b1f-c265-4b14-56342078dfa0, 'name': SearchDatastore_Task, 'duration_secs': 0.008907} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 511.957591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 511.957591] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9/8a5c9847-fc0d-41f7-87b8-d7ff44073ea9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 511.957591] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-315190af-20a0-421b-a3f5-a73225435893 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.966283] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 511.966283] env[61898]: value = "task-1240328" [ 511.966283] env[61898]: _type = "Task" [ 511.966283] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 511.975180] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240328, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 511.988556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquiring lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 511.989034] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquired lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 511.989309] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 512.067486] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Releasing lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 512.067486] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 512.067486] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 512.067486] env[61898]: DEBUG oslo_concurrency.lockutils [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] Acquired lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 512.067486] env[61898]: DEBUG nova.network.neutron [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Refreshing network info cache for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 512.069435] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16fe0d94-e5d6-409b-b810-6f07cc349e8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.081132] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddf755a-c351-48b1-b65c-9db90b767e0b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.106262] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40fd8af8-586c-4292-9acf-fe211337b69a could not be found. [ 512.106562] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 512.106932] env[61898]: INFO nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 512.107168] env[61898]: DEBUG oslo.service.loopingcall [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 512.107382] env[61898]: DEBUG nova.compute.manager [-] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 512.108015] env[61898]: DEBUG nova.network.neutron [-] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 512.189029] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 512.194823] env[61898]: DEBUG nova.network.neutron [-] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 512.232391] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 512.232571] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 512.232727] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 512.232974] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 512.233155] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 512.233306] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 
tempest-MigrationsAdminTest-1809000760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 512.233517] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 512.233669] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 512.233917] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 512.234124] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 512.234429] env[61898]: DEBUG nova.virt.hardware [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 512.235794] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781dea15-04e9-46fb-8ad6-6a9b7416ced0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.245280] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9209dcbb-ebc2-4073-a6fd-3ba6e1c095ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.420140] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 512.420616] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 512.427790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.463s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.428552] env[61898]: INFO nova.compute.claims [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.432894] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquiring lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.436258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.477383] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462396} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 512.479282] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9/8a5c9847-fc0d-41f7-87b8-d7ff44073ea9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 512.479282] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 512.479282] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25db2ace-3e29-4299-b43d-0884c2a7212b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.485851] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 512.485851] env[61898]: value = "task-1240329" [ 512.485851] env[61898]: _type = "Task" [ 512.485851] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 512.499998] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 512.546752] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 512.698567] env[61898]: DEBUG nova.network.neutron [-] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 512.700559] env[61898]: DEBUG nova.network.neutron [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 512.823638] env[61898]: DEBUG nova.network.neutron [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 512.872773] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 512.928225] env[61898]: DEBUG nova.compute.utils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 512.929787] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 512.929977] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 512.941141] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 512.996826] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061406} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 512.997160] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 512.998048] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84dd1b6-910f-4cc4-9a48-6082c5ac6d0a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.028815] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9/8a5c9847-fc0d-41f7-87b8-d7ff44073ea9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 513.029637] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b09b896f-8b2b-4387-b2ba-f7b6e0d0cf4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.052176] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 513.052176] env[61898]: value = "task-1240330" [ 513.052176] env[61898]: _type = "Task" [ 513.052176] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 513.064910] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240330, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.106145] env[61898]: DEBUG nova.policy [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e56a517c1aca416d810368ad50a1719f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '281dcbf7480543588e645530376457ad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 513.204736] env[61898]: INFO nova.compute.manager [-] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Took 1.10 seconds to deallocate network for instance. 
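The recurring lock lines above (for example `Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim"`, `acquired ... :: waited 1.463s`, `"released" ... :: held 2.258s`) are emitted by oslo.concurrency's lock wrapper, which is the `inner` frame at lockutils.py:402/407/421 in those messages. A minimal sketch of the pattern that produces them, using the `lockutils.synchronized` decorator; the guarded function here is a placeholder, not Nova's resource tracker.

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# 'Acquiring lock' / 'acquired :: waited' / 'released :: held' DEBUG lines
# in this log.  The guarded function is a placeholder, not Nova's code.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    # Critical section: only one thread/green thread at a time may touch
    # the (placeholder) per-host resource-tracker state.
    time.sleep(0.1)
    return instance_uuid


if __name__ == '__main__':
    # Emits DEBUG messages analogous to the lockutils lines above:
    # acquiring, acquired (:: waited N.NNNs), released (:: held N.NNNs).
    instance_claim('ba29c234-4f7b-414c-9b6b-6a2fa68e9533')
```

The `waited`/`held` durations in the log are the same timings this wrapper reports: how long the caller queued for the lock and how long it held it.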
[ 513.209540] env[61898]: DEBUG nova.compute.claims [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 513.209737] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.329298] env[61898]: DEBUG oslo_concurrency.lockutils [req-a2ac36ce-8ffc-4757-8197-e3bccb824878 req-14713c2b-9589-4402-98d4-5477d09f2f3e service nova] Releasing lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 513.338966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquiring lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.339222] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.375085] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Releasing lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 513.375504] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 513.375685] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 513.375960] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f10148ac-328e-4a13-9caa-12aba7377a19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.384746] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3688c4-670a-4f65-bf1a-66c71472c799 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.411141] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63f32d41-18e3-4918-981d-10e8f22423b8 could not be found. [ 513.411141] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 513.411556] env[61898]: INFO nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 513.411672] env[61898]: DEBUG oslo.service.loopingcall [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 513.412052] env[61898]: DEBUG nova.compute.manager [-] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 513.412052] env[61898]: DEBUG nova.network.neutron [-] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 513.432726] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 513.461247] env[61898]: DEBUG nova.network.neutron [-] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 513.474051] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.571540] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240330, 'name': ReconfigVM_Task, 'duration_secs': 0.287738} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 513.574218] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9/8a5c9847-fc0d-41f7-87b8-d7ff44073ea9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 513.575082] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efcda14e-6abb-44b8-a790-adb3f4913dbc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.585754] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 513.585754] env[61898]: value = "task-1240331" [ 513.585754] env[61898]: _type = "Task" [ 513.585754] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 513.599653] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240331, 'name': Rename_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.610022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39c7c21-c67d-448f-b658-df1e803dd2d6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.620070] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f602ed86-b94c-4261-a2c8-78c402e5384c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.654255] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc28f939-7190-4c82-875f-9590bab5b334 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.662563] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe50a02-6ed6-451f-b5a7-50ec60ab560f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.679391] env[61898]: DEBUG nova.compute.provider_tree [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 513.841615] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 513.963672] env[61898]: DEBUG nova.network.neutron [-] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 514.103778] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240331, 'name': Rename_Task, 'duration_secs': 0.139217} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 514.104612] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 514.105114] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f72aceb2-4e39-4815-8f13-bba2d927f3e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.118022] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 514.118022] env[61898]: value = "task-1240332" [ 514.118022] env[61898]: _type = "Task" [ 514.118022] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 514.127020] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 514.186861] env[61898]: DEBUG nova.scheduler.client.report [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 514.249357] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Successfully created port: 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 514.383739] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.444441] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 514.467368] env[61898]: INFO nova.compute.manager [-] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Took 1.06 seconds to deallocate network for instance. [ 514.471556] env[61898]: DEBUG nova.compute.claims [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 514.471734] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.483248] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 514.483661] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 514.483994] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 514.484757] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 514.484757] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 514.484757] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 514.484918] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 514.485101] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 514.485382] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 514.485382] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 514.485884] env[61898]: DEBUG nova.virt.hardware [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 514.486799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9d5317-185b-42ac-86fa-ef22360ce95e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.495547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f2f8ea-38d2-4c2e-b4e3-84280349ee73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.614380] env[61898]: DEBUG nova.compute.manager [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Received event network-changed-3e565297-dbb9-4d50-97af-752995c852a2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 514.616504] env[61898]: DEBUG nova.compute.manager [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Refreshing instance network info cache due to event network-changed-3e565297-dbb9-4d50-97af-752995c852a2. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 514.616504] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] Acquiring lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 514.616504] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] Acquired lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 514.616504] env[61898]: DEBUG nova.network.neutron [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Refreshing network info cache for port 3e565297-dbb9-4d50-97af-752995c852a2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 514.633171] env[61898]: DEBUG oslo_vmware.api [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240332, 'name': PowerOnVM_Task, 'duration_secs': 0.463736} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 514.633672] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 514.633861] env[61898]: INFO nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Took 9.16 seconds to spawn the instance on the hypervisor. [ 514.634125] env[61898]: DEBUG nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 514.635312] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b8cb8d-25ed-4a2d-a2e3-e68bdcb57a91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.699193] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.699193] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 514.706394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.496s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.154268] env[61898]: DEBUG nova.network.neutron [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 515.160629] env[61898]: INFO nova.compute.manager [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Took 13.95 seconds to build instance. [ 515.210043] env[61898]: DEBUG nova.compute.utils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 515.210043] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 515.210696] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 515.215637] env[61898]: ERROR nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. 
[ 515.215637] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.215637] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.215637] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.215637] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.215637] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.215637] env[61898]: ERROR nova.compute.manager raise self.value [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.215637] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 515.215637] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.215637] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 515.216118] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.216118] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 515.216118] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. 
[ 515.216118] env[61898]: ERROR nova.compute.manager [ 515.219024] env[61898]: Traceback (most recent call last): [ 515.219024] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 515.219024] env[61898]: listener.cb(fileno) [ 515.219024] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.219024] env[61898]: result = function(*args, **kwargs) [ 515.219024] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.219024] env[61898]: return func(*args, **kwargs) [ 515.219024] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.219024] env[61898]: raise e [ 515.219024] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.219024] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 515.219024] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.219024] env[61898]: created_port_ids = self._update_ports_for_instance( [ 515.219024] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.219024] env[61898]: with excutils.save_and_reraise_exception(): [ 515.219024] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.219024] env[61898]: self.force_reraise() [ 515.219024] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.219024] env[61898]: raise self.value [ 515.219024] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.219024] env[61898]: updated_port = self._update_port( [ 515.219024] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.219024] env[61898]: _ensure_no_port_binding_failure(port) [ 515.219024] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.219024] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 515.219024] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. [ 515.219024] env[61898]: Removing descriptor: 15 [ 515.221860] env[61898]: ERROR nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. 
[ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Traceback (most recent call last): [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] yield resources [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.driver.spawn(context, instance, image_meta, [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self._vmops.spawn(context, instance, image_meta, injected_files, [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] vm_ref = self.build_virtual_machine(instance, [ 515.221860] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] vif_infos = vmwarevif.get_vif_info(self._session, [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] for vif in network_info: [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self._sync_wrapper(fn, *args, **kwargs) [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.wait() [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self[:] = self._gt.wait() [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self._exit_event.wait() [ 515.222246] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 515.222246] env[61898]: ERROR 
nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] result = hub.switch() [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self.greenlet.switch() [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] result = function(*args, **kwargs) [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return func(*args, **kwargs) [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise e [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] nwinfo = self.network_api.allocate_for_instance( [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] created_port_ids = self._update_ports_for_instance( [ 515.222630] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] with excutils.save_and_reraise_exception(): [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.force_reraise() [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise self.value [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] updated_port = self._update_port( [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.222994] 
env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] _ensure_no_port_binding_failure(port) [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise exception.PortBindingFailed(port_id=port['id']) [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. [ 515.222994] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] [ 515.225189] env[61898]: INFO nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Terminating instance [ 515.340492] env[61898]: DEBUG nova.policy [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af0ab02e7a064f769416ad69491b07a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c2b938df1d1d4d61ab15fa58ea8f8419', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 515.386065] env[61898]: DEBUG nova.network.neutron [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.409419] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3178c4e9-0186-4995-a7a8-1a771152b103 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.419816] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142153b2-488f-413d-87f7-f07911dbbe49 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.452449] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d348ca4f-e6a3-4583-a57c-13c881b0108f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.460439] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8701f031-320b-4c5f-ad48-4c9127cc6f2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.482258] env[61898]: DEBUG nova.compute.provider_tree [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed in ProviderTree for provider: 
79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.653207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquiring lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.653441] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.663038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b6dfd3c-174f-43c5-a46a-219268982439 tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.470s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 515.715746] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 515.728360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.728551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquired lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.728727] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 515.894556] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4e81f7f-4471-4b0c-8e4f-eb9abd5053a8 req-682c3811-e409-4be7-b2f3-36999f106715 service nova] Releasing lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 515.955549] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquiring lock "8e5a01e2-67ba-4832-815f-34767deba62f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.960861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "8e5a01e2-67ba-4832-815f-34767deba62f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.990153] env[61898]: DEBUG nova.scheduler.client.report [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 516.027180] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Successfully created port: 12ad4367-208a-4766-a8c3-418b18cb8e66 
{{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.044696] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquiring lock "5d11fc94-b63a-475c-bcb3-f212c838668c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.044977] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "5d11fc94-b63a-475c-bcb3-f212c838668c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.160430] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.268436] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 516.453582] env[61898]: ERROR nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. 
[ 516.453582] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 516.453582] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 516.453582] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 516.453582] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 516.453582] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 516.453582] env[61898]: ERROR nova.compute.manager raise self.value [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 516.453582] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 516.453582] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 516.453582] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 516.454243] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 516.454243] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 516.454243] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. 
[ 516.454243] env[61898]: ERROR nova.compute.manager [ 516.454243] env[61898]: Traceback (most recent call last): [ 516.454243] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 516.454243] env[61898]: listener.cb(fileno) [ 516.454243] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 516.454243] env[61898]: result = function(*args, **kwargs) [ 516.454243] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 516.454243] env[61898]: return func(*args, **kwargs) [ 516.454243] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 516.454243] env[61898]: raise e [ 516.454243] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 516.454243] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 516.454243] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 516.454243] env[61898]: created_port_ids = self._update_ports_for_instance( [ 516.454243] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 516.454243] env[61898]: with excutils.save_and_reraise_exception(): [ 516.454243] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 516.454243] env[61898]: self.force_reraise() [ 516.454243] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 516.454243] env[61898]: raise self.value [ 516.454243] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 516.454243] env[61898]: updated_port = self._update_port( [ 516.454243] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 516.454243] env[61898]: _ensure_no_port_binding_failure(port) [ 516.454243] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 516.454243] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 516.455539] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. [ 516.455539] env[61898]: Removing descriptor: 19 [ 516.455539] env[61898]: ERROR nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. 
[ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Traceback (most recent call last): [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] yield resources [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.driver.spawn(context, instance, image_meta, [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 516.455539] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] vm_ref = self.build_virtual_machine(instance, [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] for vif in network_info: [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self._sync_wrapper(fn, *args, **kwargs) [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.wait() [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self[:] = self._gt.wait() [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self._exit_event.wait() [ 516.456216] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 516.456533] env[61898]: ERROR 
nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] result = hub.switch() [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self.greenlet.switch() [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] result = function(*args, **kwargs) [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return func(*args, **kwargs) [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise e [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] nwinfo = self.network_api.allocate_for_instance( [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 516.456533] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] created_port_ids = self._update_ports_for_instance( [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] with excutils.save_and_reraise_exception(): [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.force_reraise() [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise self.value [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] updated_port = self._update_port( [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 516.456965] 
env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] _ensure_no_port_binding_failure(port) [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 516.456965] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise exception.PortBindingFailed(port_id=port['id']) [ 516.457366] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. [ 516.457366] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] [ 516.457366] env[61898]: INFO nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Terminating instance [ 516.463979] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.496099] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.790s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.496791] env[61898]: ERROR nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. 
[ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Traceback (most recent call last): [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.driver.spawn(context, instance, image_meta, [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] vm_ref = self.build_virtual_machine(instance, [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] vif_infos = vmwarevif.get_vif_info(self._session, [ 516.496791] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] for vif in network_info: [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self._sync_wrapper(fn, *args, **kwargs) [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.wait() [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self[:] = self._gt.wait() [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self._exit_event.wait() [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] result = hub.switch() [ 516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
516.497109] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return self.greenlet.switch() [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] result = function(*args, **kwargs) [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] return func(*args, **kwargs) [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise e [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] nwinfo = self.network_api.allocate_for_instance( [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] created_port_ids = self._update_ports_for_instance( [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] with excutils.save_and_reraise_exception(): [ 516.497431] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] self.force_reraise() [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise self.value [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] updated_port = self._update_port( [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] _ensure_no_port_binding_failure(port) [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] raise exception.PortBindingFailed(port_id=port['id']) [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] nova.exception.PortBindingFailed: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. [ 516.497771] env[61898]: ERROR nova.compute.manager [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] [ 516.498056] env[61898]: DEBUG nova.compute.utils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 516.499209] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.025s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.500458] env[61898]: INFO nova.compute.claims [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.508910] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Build of instance 40fd8af8-586c-4292-9acf-fe211337b69a was re-scheduled: Binding failed for port 1172a41c-e8d2-41a4-95d9-b361dfa1fe4a, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 516.509423] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 516.509709] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 516.509787] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquired lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 516.509997] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 516.517372] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.547557] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 516.694199] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.733687] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 516.766386] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 516.767075] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 516.767075] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 516.767226] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 516.767807] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 516.767807] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 516.767972] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 516.768207] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 516.768408] env[61898]: DEBUG nova.virt.hardware [None 
req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 516.768602] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 516.768779] env[61898]: DEBUG nova.virt.hardware [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 516.769667] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e55f10f-35f9-437b-982c-c72891285806 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.778528] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80560cf-7cc0-4a03-8511-3b8225b704c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.962710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 516.962710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquired lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 516.962710] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 516.985556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.020480] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Releasing lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.020881] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb 
tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 517.021303] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 517.022161] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6531da64-2705-4c5a-9064-2206b4f58986 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.035255] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1435f191-dbbb-4f87-8c55-67fb7137cb8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.049178] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 517.065404] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 052ff2f0-770a-4511-ae0c-e351ad987904 could not be found. [ 517.066388] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 517.066743] env[61898]: INFO nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Took 0.05 seconds to destroy the instance on the hypervisor. [ 517.067138] env[61898]: DEBUG oslo.service.loopingcall [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 517.067542] env[61898]: DEBUG nova.compute.manager [-] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 517.068924] env[61898]: DEBUG nova.network.neutron [-] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 517.076294] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.101058] env[61898]: DEBUG nova.network.neutron [-] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 517.179165] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.249734] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquiring lock "4b39d9ad-b7d3-4464-b9e0-799440b445e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.250680] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "4b39d9ad-b7d3-4464-b9e0-799440b445e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.499304] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 517.608111] env[61898]: DEBUG nova.network.neutron [-] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.635429] env[61898]: ERROR nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. 
[ 517.635429] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.635429] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.635429] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.635429] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.635429] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.635429] env[61898]: ERROR nova.compute.manager raise self.value [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.635429] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 517.635429] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.635429] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 517.635886] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.635886] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 517.635886] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. 
[ 517.635886] env[61898]: ERROR nova.compute.manager [ 517.635886] env[61898]: Traceback (most recent call last): [ 517.635886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 517.635886] env[61898]: listener.cb(fileno) [ 517.635886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.635886] env[61898]: result = function(*args, **kwargs) [ 517.635886] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 517.635886] env[61898]: return func(*args, **kwargs) [ 517.635886] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 517.635886] env[61898]: raise e [ 517.635886] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.635886] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 517.635886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.635886] env[61898]: created_port_ids = self._update_ports_for_instance( [ 517.635886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.635886] env[61898]: with excutils.save_and_reraise_exception(): [ 517.635886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.635886] env[61898]: self.force_reraise() [ 517.635886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.635886] env[61898]: raise self.value [ 517.635886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.635886] env[61898]: updated_port = self._update_port( [ 517.635886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.635886] env[61898]: _ensure_no_port_binding_failure(port) [ 517.635886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.635886] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 517.637360] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. [ 517.637360] env[61898]: Removing descriptor: 18 [ 517.637360] env[61898]: ERROR nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. 
[ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Traceback (most recent call last): [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] yield resources [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.driver.spawn(context, instance, image_meta, [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self._vmops.spawn(context, instance, image_meta, injected_files, [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 517.637360] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] vm_ref = self.build_virtual_machine(instance, [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] vif_infos = vmwarevif.get_vif_info(self._session, [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] for vif in network_info: [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self._sync_wrapper(fn, *args, **kwargs) [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.wait() [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self[:] = self._gt.wait() [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self._exit_event.wait() [ 517.637781] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 517.639353] env[61898]: ERROR 
nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] result = hub.switch() [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self.greenlet.switch() [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] result = function(*args, **kwargs) [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return func(*args, **kwargs) [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise e [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] nwinfo = self.network_api.allocate_for_instance( [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 517.639353] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] created_port_ids = self._update_ports_for_instance( [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] with excutils.save_and_reraise_exception(): [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.force_reraise() [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise self.value [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] updated_port = self._update_port( [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 517.639828] 
env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] _ensure_no_port_binding_failure(port) [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 517.639828] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise exception.PortBindingFailed(port_id=port['id']) [ 517.640186] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. [ 517.640186] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] [ 517.640186] env[61898]: INFO nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Terminating instance [ 517.652484] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.682612] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Releasing lock "refresh_cache-40fd8af8-586c-4292-9acf-fe211337b69a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.682829] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 517.685058] env[61898]: DEBUG nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 517.685058] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 517.711315] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 517.740636] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fd1c36-c53f-4392-aa15-0f24c350a14b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.748322] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189bfd5d-3287-4547-adef-16f4e04c9b9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.783326] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e538e0bc-c381-4fc3-a757-a5392feebfb0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.791768] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95cef16-fb61-4ee8-ab73-d802d3780c71 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.805775] env[61898]: DEBUG nova.compute.provider_tree [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.088800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "b521cc8c-e214-467f-8399-55f075b9bba3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.089062] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "b521cc8c-e214-467f-8399-55f075b9bba3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.109956] env[61898]: INFO nova.compute.manager [-] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Took 1.04 seconds to deallocate network for instance. 
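The "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG lines throughout this log come from the oslo.concurrency lockutils wrapper (the {{...}} trailers point at lockutils.py's inner()). A minimal sketch of how a method picks up that tracing, using only the public lockutils.synchronized decorator; ResourceTrackerLike and its methods are placeholders, not Nova's ResourceTracker:

```python
# Illustrative only: reproduces the lock tracing seen above when DEBUG
# logging is enabled for oslo_concurrency.lockutils.
from oslo_concurrency import lockutils


class ResourceTrackerLike:
    """Hypothetical stand-in for the component serializing on "compute_resources"."""

    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid):
        # Runs while holding the in-process "compute_resources" lock; the
        # decorator logs how long the caller waited and how long it held it.
        return f"claimed resources for {instance_uuid}"

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, instance_uuid):
        # Same lock name, so a claim and its rollback never interleave.
        return f"aborted claim for {instance_uuid}"
```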
[ 518.113563] env[61898]: DEBUG nova.compute.claims [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 518.114105] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.143533] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquiring lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.143735] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquired lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.143929] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 518.155496] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Releasing lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 518.155780] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 518.156597] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 518.156597] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d47aeb9-2107-4804-b408-08fd34efe90c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.168057] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33526e17-1a62-44d9-ae5f-e564334d7ca7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.193819] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4c02ebbd-345f-4253-bc28-f90c731c78aa could not be found. [ 518.194099] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 518.194596] env[61898]: INFO nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 518.194596] env[61898]: DEBUG oslo.service.loopingcall [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 518.194756] env[61898]: DEBUG nova.compute.manager [-] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 518.195124] env[61898]: DEBUG nova.network.neutron [-] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 518.210809] env[61898]: DEBUG nova.network.neutron [-] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.213598] env[61898]: DEBUG nova.network.neutron [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.312994] env[61898]: DEBUG nova.scheduler.client.report [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 518.664949] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 518.715025] env[61898]: DEBUG nova.network.neutron [-] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.716257] env[61898]: INFO nova.compute.manager [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 40fd8af8-586c-4292-9acf-fe211337b69a] Took 1.03 seconds to deallocate network for instance. 
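As a quick sanity check, the inventory reported just above maps to schedulable capacity through the standard Placement relation capacity = (total - reserved) * allocation_ratio. Plugging in the numbers from this provider:

```python
# Capacity implied by the inventory logged for provider
# 79886f75-94e9-4bf0-9cbd-87f3715d3144 (values copied from the log line).
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} consumable units")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```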
[ 518.741032] env[61898]: DEBUG nova.compute.manager [None req-f40caf58-c062-4c71-b350-d4c1698697c9 tempest-ServerDiagnosticsV248Test-819306233 tempest-ServerDiagnosticsV248Test-819306233-project-admin] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 518.741032] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d281a94f-270f-48fd-8bf9-d0c33d7b0a67 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.749286] env[61898]: INFO nova.compute.manager [None req-f40caf58-c062-4c71-b350-d4c1698697c9 tempest-ServerDiagnosticsV248Test-819306233 tempest-ServerDiagnosticsV248Test-819306233-project-admin] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Retrieving diagnostics [ 518.750347] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324095d2-8f7a-43dd-bfb3-b0b68b27df30 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.792256] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.824923] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.825668] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 518.828168] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.445s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.831143] env[61898]: INFO nova.compute.claims [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.221092] env[61898]: INFO nova.compute.manager [-] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Took 1.03 seconds to deallocate network for instance. 
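The instance_claim / abort_instance_claim messages around the "compute_resources" lock trace the resource-claim lifecycle: a claim succeeds on the node before the build starts, and it is aborted when the build fails, as happens for the PortBindingFailed instances in this section. A rough sketch of that flow under hypothetical names (claim_resources, abort_claim, build_instance); this is not Nova's code:

```python
# Hypothetical illustration of the claim-then-abort-on-failure pattern the
# surrounding log lines record.
def build_with_claim(tracker, instance_uuid, build_instance):
    claim = tracker.claim_resources(instance_uuid)  # "Claim successful on node ..."
    try:
        return build_instance(instance_uuid)
    except Exception:
        # Any build failure (here: PortBindingFailed) rolls the claim back so
        # the node's resources are freed before the instance is re-scheduled.
        tracker.abort_claim(claim)                  # "Aborting claim: ..."
        raise
```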
[ 519.226996] env[61898]: DEBUG nova.compute.claims [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 519.227196] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.298290] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Releasing lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 519.298290] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 519.298290] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 519.298290] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-627aaf0f-0311-4d9f-8db6-1f40149a39c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.304499] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26b8d4f-6ab8-457f-8bbe-4d116743a17f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.327393] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ba29c234-4f7b-414c-9b6b-6a2fa68e9533 could not be found. [ 519.327803] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 519.328076] env[61898]: INFO nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Took 0.03 seconds to destroy the instance on the hypervisor. 
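The WARNING "Instance does not exist on backend" followed immediately by "Instance destroyed" shows the teardown path tolerating a VM that never made it to vCenter (the spawn failed during network allocation). A speculative sketch of that pattern; InstanceNotFound, find_vm and delete_vm are stand-in names, not the driver's real helpers:

```python
# Sketch only: treat a missing backend VM as already destroyed so the rest
# of the cleanup (network deallocation, claim abort) still runs.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


def destroy_vm(session, instance_uuid, find_vm, delete_vm):
    try:
        vm_ref = find_vm(session, instance_uuid)
        delete_vm(session, vm_ref)
    except InstanceNotFound:
        LOG.warning("Instance %s does not exist on backend; "
                    "treating it as already destroyed", instance_uuid)
```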
[ 519.328417] env[61898]: DEBUG oslo.service.loopingcall [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 519.328685] env[61898]: DEBUG nova.compute.manager [-] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 519.328822] env[61898]: DEBUG nova.network.neutron [-] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 519.333468] env[61898]: DEBUG nova.compute.utils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.338050] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 519.338475] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 519.351157] env[61898]: DEBUG nova.network.neutron [-] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 519.385308] env[61898]: DEBUG nova.policy [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '230eea5c8e4a45a7b3bc1c19fc816251', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0de0c72b7669432b8e65d80c81749a7a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.514355] env[61898]: DEBUG nova.compute.manager [req-117eed82-73fb-4145-9cc7-2f86b02800fc req-3bf2d4a4-5702-44e4-bd71-2509e9dabbb1 service nova] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Received event network-vif-deleted-3e565297-dbb9-4d50-97af-752995c852a2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 519.639468] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Successfully created port: 82e047c2-8942-448f-b74f-5813bc4551fc {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.755681] env[61898]: INFO nova.scheduler.client.report [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Deleted allocations for instance 40fd8af8-586c-4292-9acf-fe211337b69a [ 519.791064] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Received event network-changed-7e967d97-acf7-4035-8404-668980138d1a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 519.791289] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Refreshing instance network info cache due to event network-changed-7e967d97-acf7-4035-8404-668980138d1a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 519.791507] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Acquiring lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.791646] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Acquired lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.791799] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Refreshing network info cache for port 7e967d97-acf7-4035-8404-668980138d1a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 519.838207] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 519.854709] env[61898]: DEBUG nova.network.neutron [-] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.036053] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d563155-3897-4b6e-9534-1d95bfcc7877 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.044261] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce9e8c8-3f83-4d05-b5ea-1df187a556f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.078960] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2adac1-22ae-4428-98ac-348700cff2ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.086491] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d7fd6b2-4838-41ce-b93f-03156b66e410 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.100999] env[61898]: DEBUG nova.compute.provider_tree [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.264313] env[61898]: DEBUG oslo_concurrency.lockutils [None req-104d3217-6e40-4b6d-ac2a-d1437a99de87 tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "40fd8af8-586c-4292-9acf-fe211337b69a" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.053s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.313329] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 520.359021] env[61898]: INFO nova.compute.manager [-] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Took 1.03 seconds to deallocate network for instance. [ 520.361527] env[61898]: DEBUG nova.compute.claims [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 520.362129] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.397932] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.575237] env[61898]: ERROR nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. 
[ 520.575237] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.575237] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.575237] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.575237] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.575237] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.575237] env[61898]: ERROR nova.compute.manager raise self.value [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.575237] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 520.575237] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.575237] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 520.575744] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.575744] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 520.575744] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. 
[ 520.575744] env[61898]: ERROR nova.compute.manager [ 520.575744] env[61898]: Traceback (most recent call last): [ 520.575744] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 520.575744] env[61898]: listener.cb(fileno) [ 520.575744] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.575744] env[61898]: result = function(*args, **kwargs) [ 520.575744] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.575744] env[61898]: return func(*args, **kwargs) [ 520.575744] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.575744] env[61898]: raise e [ 520.575744] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.575744] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 520.575744] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.575744] env[61898]: created_port_ids = self._update_ports_for_instance( [ 520.575744] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.575744] env[61898]: with excutils.save_and_reraise_exception(): [ 520.575744] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.575744] env[61898]: self.force_reraise() [ 520.575744] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.575744] env[61898]: raise self.value [ 520.575744] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.575744] env[61898]: updated_port = self._update_port( [ 520.575744] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.575744] env[61898]: _ensure_no_port_binding_failure(port) [ 520.575744] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.575744] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 520.577281] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. 
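The same traceback appears twice here: once logged by nova.compute.manager for the failed build and once by the eventlet greenthread that ran _allocate_network_async. Both end in the same place: allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports the port binding as failed. Below is a minimal, self-contained sketch of that final check, assuming the port dict carries Neutron's binding:vif_type field; it is an illustration of the behaviour shown in the log, not the actual Nova source.

```python
# Minimal sketch of the check the traceback ends in
# (nova/network/neutron.py, _ensure_no_port_binding_failure).
# Illustration only; not the actual Nova source.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Assumption: Neutron marks a port whose binding could not be completed
    # with binding:vif_type = 'binding_failed'; Nova turns that into an
    # exception so the build is aborted and the instance re-scheduled.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# The port created at 519.639468 comes back from Neutron with a failed
# binding, producing the message repeated throughout this log:
try:
    _ensure_no_port_binding_failure(
        {'id': '82e047c2-8942-448f-b74f-5813bc4551fc',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
```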
[ 520.577281] env[61898]: Removing descriptor: 18 [ 520.606057] env[61898]: DEBUG nova.scheduler.client.report [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 520.650134] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.650364] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 520.767550] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 520.853309] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 520.883251] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.883513] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.883667] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.883851] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.883995] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.884173] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 520.884381] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.884534] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.884698] env[61898]: DEBUG nova.virt.hardware [None 
req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.884856] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.885046] env[61898]: DEBUG nova.virt.hardware [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.885913] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c02961-5afd-44ea-acd1-bbdcccac4107 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.893504] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7d6436-dcb0-4007-9966-18441ca8ab42 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.906719] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Releasing lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.906976] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Received event network-vif-deleted-7e967d97-acf7-4035-8404-668980138d1a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 520.907179] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Received event network-changed-3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 520.907422] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Refreshing instance network info cache due to event network-changed-3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 520.907536] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Acquiring lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 520.907671] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Acquired lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 520.907823] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Refreshing network info cache for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 520.909468] env[61898]: ERROR nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Traceback (most recent call last): [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] yield resources [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.driver.spawn(context, instance, image_meta, [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] vm_ref = self.build_virtual_machine(instance, [ 520.909468] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] vif_infos = vmwarevif.get_vif_info(self._session, [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 
57f77a5f-87d0-4a9a-80ea-4b24baf33d02] for vif in network_info: [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return self._sync_wrapper(fn, *args, **kwargs) [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.wait() [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self[:] = self._gt.wait() [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return self._exit_event.wait() [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 520.909810] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] current.throw(*self._exc) [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] result = function(*args, **kwargs) [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return func(*args, **kwargs) [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise e [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] nwinfo = self.network_api.allocate_for_instance( [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] created_port_ids = self._update_ports_for_instance( [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] with 
excutils.save_and_reraise_exception(): [ 520.910229] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.force_reraise() [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise self.value [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] updated_port = self._update_port( [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] _ensure_no_port_binding_failure(port) [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise exception.PortBindingFailed(port_id=port['id']) [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. [ 520.910656] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] [ 520.910656] env[61898]: INFO nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Terminating instance [ 521.112935] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.112935] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 521.119749] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.648s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.303779] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.357926] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquiring lock "c1c15498-af88-4fcf-9a58-7060502bcaf2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.358243] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Lock "c1c15498-af88-4fcf-9a58-7060502bcaf2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.358496] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquiring lock "3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.358727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.414404] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquiring lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 521.414602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquired lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 521.414824] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 521.433443] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 521.524063] env[61898]: DEBUG nova.network.neutron [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.620690] env[61898]: DEBUG nova.compute.utils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 521.622212] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 521.622448] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 521.728984] env[61898]: DEBUG nova.policy [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e11b7c3f5284e2899ca94d664c21a8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e5808095ec7443aa7d36cb070fa014d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 521.888313] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774bb019-77d8-44f0-82d2-0dea8e36cbbd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.898341] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d49e19-0fbe-4d16-9853-92e4f61361c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.938706] env[61898]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde133f3-af22-417d-9bd9-7e4ca94867f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.951403] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2640c76b-8864-4f39-84a2-39e176fb83b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.967514] env[61898]: DEBUG nova.compute.provider_tree [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.971892] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.028135] env[61898]: DEBUG oslo_concurrency.lockutils [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] Releasing lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.028658] env[61898]: DEBUG nova.compute.manager [req-f76d61b5-a07b-4e07-b20a-b4ae9132f1a2 req-52edd087-08d9-4bb0-adb3-e172c742aa6f service nova] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Received event network-vif-deleted-3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 522.117896] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 522.126597] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 522.260309] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Successfully created port: 46220313-cfd2-4aa5-9379-894cdd6a4ccf {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 522.475240] env[61898]: DEBUG nova.scheduler.client.report [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 522.625388] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Releasing lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 522.625388] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 522.625851] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 522.625851] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c68ae8bf-20be-46ff-9caa-c1dab0d37889 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.644870] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e37efdd-d400-4696-8d68-bf8533abd16c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.669180] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57f77a5f-87d0-4a9a-80ea-4b24baf33d02 could not be found. 
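At 522.669 the vmwareapi driver tears down the half-built instance, finds no VM on the backend (the spawn never reached vCenter because the port binding failed), logs the InstanceNotFound warning above, and carries on as if the destroy succeeded so that network deallocation and the re-schedule can proceed. A rough sketch of that tolerant-destroy control flow follows; the session helpers are hypothetical placeholders, only the behaviour is taken from the log.

```python
# Rough sketch of the tolerant destroy seen at 522.669: look the VM up by
# UUID (SearchIndex.FindAllByUuid in the log) and, if the backend has no
# such VM, warn and treat the instance as already destroyed. The session
# helpers used here are hypothetical placeholders, not oslo.vmware APIs.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_on_hypervisor(session, instance_uuid):
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)   # hypothetical helper
        session.unregister_and_destroy_vm(vm_ref)         # hypothetical helper
    except InstanceNotFound:
        # Matches the WARNING above: nothing exists on the backend, so there
        # is nothing to clean up on the hypervisor side.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")
```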
[ 522.669180] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 522.669337] env[61898]: INFO nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Took 0.04 seconds to destroy the instance on the hypervisor. [ 522.669662] env[61898]: DEBUG oslo.service.loopingcall [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 522.669894] env[61898]: DEBUG nova.compute.manager [-] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 522.670025] env[61898]: DEBUG nova.network.neutron [-] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 522.698715] env[61898]: DEBUG nova.network.neutron [-] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 522.716832] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Received event network-changed-12ad4367-208a-4766-a8c3-418b18cb8e66 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 522.717227] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Refreshing instance network info cache due to event network-changed-12ad4367-208a-4766-a8c3-418b18cb8e66. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 522.717306] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Acquiring lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.717422] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Acquired lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.717713] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Refreshing network info cache for port 12ad4367-208a-4766-a8c3-418b18cb8e66 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 522.980984] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.861s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.982264] env[61898]: ERROR nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. 
[ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Traceback (most recent call last): [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.driver.spawn(context, instance, image_meta, [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] vm_ref = self.build_virtual_machine(instance, [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 522.982264] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] for vif in network_info: [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self._sync_wrapper(fn, *args, **kwargs) [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.wait() [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self[:] = self._gt.wait() [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self._exit_event.wait() [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] result = hub.switch() [ 522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
522.982641] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return self.greenlet.switch() [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] result = function(*args, **kwargs) [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] return func(*args, **kwargs) [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise e [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] nwinfo = self.network_api.allocate_for_instance( [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] created_port_ids = self._update_ports_for_instance( [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] with excutils.save_and_reraise_exception(): [ 522.982956] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] self.force_reraise() [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise self.value [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] updated_port = self._update_port( [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] _ensure_no_port_binding_failure(port) [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] raise exception.PortBindingFailed(port_id=port['id']) [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] nova.exception.PortBindingFailed: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. [ 522.983273] env[61898]: ERROR nova.compute.manager [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] [ 522.983580] env[61898]: DEBUG nova.compute.utils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 522.985593] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Build of instance 63f32d41-18e3-4918-981d-10e8f22423b8 was re-scheduled: Binding failed for port 3e565297-dbb9-4d50-97af-752995c852a2, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 522.986116] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 522.986444] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquiring lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.986710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Acquired lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.987116] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 522.991695] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.297s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.992899] env[61898]: INFO nova.compute.claims [None 
req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 523.139091] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 523.153404] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.154725] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 523.155627] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 523.155744] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 523.170883] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.171148] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.171321] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.171828] env[61898]: DEBUG nova.virt.hardware [None 
req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.171828] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.171828] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.172087] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.172194] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.172348] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.172503] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.172668] env[61898]: DEBUG nova.virt.hardware [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.173574] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191d57a9-2dfc-4a0e-9345-eed529ce5e8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.185786] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1c6074-aea2-4437-94d4-dd2e7dcdd4dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.202781] env[61898]: DEBUG nova.network.neutron [-] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 523.255215] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.321796] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "0dfabd80-a385-4124-af33-083559819d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.322021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.533940] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 523.593018] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.665227] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.665421] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.665552] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.665674] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.665791] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Skipping network cache update for instance because it is Building. 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.665906] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 523.709196] env[61898]: INFO nova.compute.manager [-] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Took 1.04 seconds to deallocate network for instance. [ 523.714149] env[61898]: DEBUG nova.compute.claims [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 523.714149] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.714951] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.718418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.718610] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 523.718773] env[61898]: DEBUG nova.objects.instance [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lazy-loading 'info_cache' on Instance uuid 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 523.749984] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.915779] env[61898]: ERROR nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. 
[ 523.915779] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.915779] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.915779] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.915779] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.915779] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.915779] env[61898]: ERROR nova.compute.manager raise self.value [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.915779] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.915779] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.915779] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.916323] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.916323] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.916323] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. 
[ 523.916323] env[61898]: ERROR nova.compute.manager [ 523.916323] env[61898]: Traceback (most recent call last): [ 523.916323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.916323] env[61898]: listener.cb(fileno) [ 523.916323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.916323] env[61898]: result = function(*args, **kwargs) [ 523.916323] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.916323] env[61898]: return func(*args, **kwargs) [ 523.916323] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.916323] env[61898]: raise e [ 523.916323] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.916323] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 523.916323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.916323] env[61898]: created_port_ids = self._update_ports_for_instance( [ 523.916323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.916323] env[61898]: with excutils.save_and_reraise_exception(): [ 523.916323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.916323] env[61898]: self.force_reraise() [ 523.916323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.916323] env[61898]: raise self.value [ 523.916323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.916323] env[61898]: updated_port = self._update_port( [ 523.916323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.916323] env[61898]: _ensure_no_port_binding_failure(port) [ 523.916323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.916323] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.917066] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. [ 523.917066] env[61898]: Removing descriptor: 18 [ 523.917066] env[61898]: ERROR nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. 
[ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Traceback (most recent call last): [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] yield resources [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.driver.spawn(context, instance, image_meta, [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.917066] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] vm_ref = self.build_virtual_machine(instance, [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] for vif in network_info: [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self._sync_wrapper(fn, *args, **kwargs) [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.wait() [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self[:] = self._gt.wait() [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self._exit_event.wait() [ 523.922255] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.924100] env[61898]: ERROR 
nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] result = hub.switch() [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self.greenlet.switch() [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] result = function(*args, **kwargs) [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return func(*args, **kwargs) [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise e [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] nwinfo = self.network_api.allocate_for_instance( [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 523.924100] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] created_port_ids = self._update_ports_for_instance( [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] with excutils.save_and_reraise_exception(): [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.force_reraise() [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise self.value [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] updated_port = self._update_port( [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.924635] 
env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] _ensure_no_port_binding_failure(port) [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.924635] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise exception.PortBindingFailed(port_id=port['id']) [ 523.925156] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. [ 523.925156] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] [ 523.925156] env[61898]: INFO nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Terminating instance [ 524.096699] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Releasing lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.097306] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Received event network-vif-deleted-12ad4367-208a-4766-a8c3-418b18cb8e66 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 524.097976] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Received event network-changed-82e047c2-8942-448f-b74f-5813bc4551fc {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 524.098503] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Refreshing instance network info cache due to event network-changed-82e047c2-8942-448f-b74f-5813bc4551fc. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 524.098953] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Acquiring lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.099150] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Acquired lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.099365] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Refreshing network info cache for port 82e047c2-8942-448f-b74f-5813bc4551fc {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 524.256172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Releasing lock "refresh_cache-63f32d41-18e3-4918-981d-10e8f22423b8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.256172] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 524.256172] env[61898]: DEBUG nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 524.256568] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 524.274670] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.314129] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a035586-3354-48ac-8d66-a58a0ee94bc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.324088] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5278b271-89bf-4fab-9311-755d05844231 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.361197] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a477ee85-811e-4939-ada8-16fce433dd4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.370619] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5da6363-c7f4-4a77-afca-9eb8695ac2de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.387105] env[61898]: DEBUG nova.compute.provider_tree [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.404166] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "cf94c3f2-a4db-479f-8251-f2e403697678" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.404407] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "cf94c3f2-a4db-479f-8251-f2e403697678" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.420120] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquiring lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.420120] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquired lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.420120] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] 
Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 524.636566] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.770105] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 524.777453] env[61898]: DEBUG nova.network.neutron [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.851193] env[61898]: DEBUG nova.network.neutron [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.890852] env[61898]: DEBUG nova.scheduler.client.report [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 524.959930] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.126098] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.281999] env[61898]: INFO nova.compute.manager [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] [instance: 63f32d41-18e3-4918-981d-10e8f22423b8] Took 1.03 seconds to deallocate network for instance. 
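
Every PortBindingFailed traceback above bottoms out in nova.network.neutron._ensure_no_port_binding_failure (neutron.py:294 in this tree): Neutron marks a port its mechanism drivers could not bind by setting binding:vif_type to 'binding_failed', and Nova turns that marker into the exception that then unwinds through _update_port, _update_ports_for_instance and _allocate_network_async. A minimal, self-contained sketch of that check, with a plain dict standing in for the Neutron port payload and a local exception class standing in for nova.exception.PortBindingFailed:

    # Sketch only: a dict-shaped port and a local exception class stand in for
    # the real Neutron payload and nova.exception.PortBindingFailed.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # A port Neutron could not bind carries binding:vif_type='binding_failed';
        # refusing to build a VIF for it aborts the network allocation early.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    failed_port = {'id': '46220313-cfd2-4aa5-9379-894cdd6a4ccf',
                   'binding:vif_type': 'binding_failed'}
    try:
        _ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)  # same message nova.compute.manager logs above

Once the exception escapes _build_and_run_instance, the manager either re-schedules the build (the "was re-scheduled" entry for instance 63f32d41 above) or, as with 8c006d14, aborts the claim and deallocates the network, which is the sequence the surrounding entries record.
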
[ 525.354447] env[61898]: DEBUG oslo_concurrency.lockutils [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] Releasing lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.354447] env[61898]: DEBUG nova.compute.manager [req-128d6139-fcc0-4d6d-aa66-cab4e9a2a881 req-149d37ea-f2f3-4854-b595-cc0290734243 service nova] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Received event network-vif-deleted-82e047c2-8942-448f-b74f-5813bc4551fc {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 525.400563] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.401594] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 525.410111] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.422s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.410111] env[61898]: INFO nova.compute.claims [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 525.481227] env[61898]: DEBUG nova.compute.manager [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Received event network-changed-46220313-cfd2-4aa5-9379-894cdd6a4ccf {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 525.481227] env[61898]: DEBUG nova.compute.manager [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Refreshing instance network info cache due to event network-changed-46220313-cfd2-4aa5-9379-894cdd6a4ccf. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 525.481227] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] Acquiring lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.572975] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.629425] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Releasing lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.630474] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 525.630774] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 525.631646] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] Acquired lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.631875] env[61898]: DEBUG nova.network.neutron [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Refreshing network info cache for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 525.637148] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23a8711f-2015-44cd-8e89-9bfadf0893a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.645639] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5a9314-a439-4ce8-8325-849b4e5de0ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.679270] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8c006d14-dbd9-40b9-a474-06382fea3b2f could not be found. 
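
The refresh_cache-<instance uuid> lock dance bracketing these cache updates (Acquiring / Acquired :: waited Ns / Releasing in the entries above) is the standard oslo.concurrency pattern: every reader or writer of an instance's network info cache serializes on a named semaphore, and lockutils emits those DEBUG transitions. A hedged sketch, assuming oslo.concurrency is importable; refresh_network_cache is a hypothetical helper, not Nova's own method, and only the lock name mirrors the log:

    # Sketch under the stated assumptions.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f')
    def refresh_network_cache():
        # Only one green thread at a time may rebuild this instance's cache;
        # lockutils logs the acquire / held / released timings seen in this file.
        return []  # placeholder for the rebuilt network_info list

    refresh_network_cache()

The plain context-manager form, lockutils.lock('refresh_cache-<uuid>'), produces the matching Acquiring/Acquired/Releasing lines logged here from lockutils.py:310, 313 and 331.
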
[ 525.679511] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 525.679686] env[61898]: INFO nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 525.679921] env[61898]: DEBUG oslo.service.loopingcall [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 525.680140] env[61898]: DEBUG nova.compute.manager [-] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 525.680299] env[61898]: DEBUG nova.network.neutron [-] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 525.728238] env[61898]: DEBUG nova.network.neutron [-] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 525.919333] env[61898]: DEBUG nova.compute.utils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 525.921850] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 525.921850] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 526.001791] env[61898]: DEBUG nova.policy [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '398f1820ecd74ca6bc5dd973705e3d68', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'edacfc61f99a43d087bb2647a5362d73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 526.076099] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.076317] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 526.076526] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.076884] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.076987] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.077519] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.077519] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.078299] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61898) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.118218] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "31aa8536-1597-4b61-b069-80daf5306dd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.118218] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "31aa8536-1597-4b61-b069-80daf5306dd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.182628] env[61898]: DEBUG nova.network.neutron [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 526.233517] env[61898]: DEBUG nova.network.neutron [-] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.325084] env[61898]: INFO nova.scheduler.client.report [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Deleted allocations for instance 63f32d41-18e3-4918-981d-10e8f22423b8 [ 526.336922] env[61898]: DEBUG nova.network.neutron [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.424499] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 526.588942] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Getting list of instances from cluster (obj){ [ 526.588942] env[61898]: value = "domain-c8" [ 526.588942] env[61898]: _type = "ClusterComputeResource" [ 526.588942] env[61898]: } {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 526.590074] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad1dea6-8fd5-426f-bbdf-e9ec623fe648 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.601050] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Got total of 1 instances {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 526.601189] env[61898]: WARNING nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] While synchronizing instance power states, found 8 instances in the database and 1 instances on the hypervisor. [ 526.601336] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.601523] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 052ff2f0-770a-4511-ae0c-e351ad987904 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.601674] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 4c02ebbd-345f-4253-bc28-f90c731c78aa {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.601822] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid ba29c234-4f7b-414c-9b6b-6a2fa68e9533 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.601966] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 57f77a5f-87d0-4a9a-80ea-4b24baf33d02 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.602196] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 8c006d14-dbd9-40b9-a474-06382fea3b2f {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.602255] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid ceaef138-8b51-428b-9966-e6db3e7b0eb3 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.602400] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 8e5a01e2-67ba-4832-815f-34767deba62f {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 526.605171] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.605609] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.605688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "052ff2f0-770a-4511-ae0c-e351ad987904" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.605863] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.606482] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.606701] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.606897] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.607124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.607380] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "8e5a01e2-67ba-4832-815f-34767deba62f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.607614] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.607829] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 526.608588] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Successfully created port: d6cdf4d1-28f5-4d43-9339-5656f128329e {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.611357] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adb9bf9-5576-4350-bd87-b0016ff11138 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.614173] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 526.740903] env[61898]: INFO nova.compute.manager [-] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Took 1.06 seconds to deallocate network for instance. [ 526.747616] env[61898]: DEBUG nova.compute.claims [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 526.747797] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.754448] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64813521-2793-4cc1-80e6-2965e7dfbdce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.763121] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd206465-24a5-4783-b623-d095c4f771a8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.796018] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96190829-aaa0-4218-9ac9-5079da9d37f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.806020] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05aa807a-f4c0-4cbe-8747-8ea880d99527 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.819106] env[61898]: DEBUG nova.compute.provider_tree [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.835184] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-b7c4974f-e75c-4a48-9ea6-3eb5372d549a tempest-InstanceActionsV221TestJSON-2057405247 tempest-InstanceActionsV221TestJSON-2057405247-project-member] Lock "63f32d41-18e3-4918-981d-10e8f22423b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.956s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.841129] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] Releasing lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.841893] env[61898]: DEBUG nova.compute.manager [req-b7560c2e-43d7-4e70-92ff-e645c1ba72c7 req-8d5f5596-e336-4526-b88f-a7005ef8ff0d service nova] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Received event network-vif-deleted-46220313-cfd2-4aa5-9379-894cdd6a4ccf {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 527.123189] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.123967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.518s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.322382] env[61898]: DEBUG nova.scheduler.client.report [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 527.337568] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 527.439129] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 527.464669] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 527.465617] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 527.465617] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 527.465685] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 527.466286] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 527.466286] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 527.466395] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 527.466513] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 527.466740] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 527.466945] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 527.467227] env[61898]: DEBUG nova.virt.hardware [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 527.468624] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbcb2e7-a996-452d-99d7-0a38b3ec7967 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.477444] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bddd63a0-0f47-4062-bf03-abbf3a5e3db2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.828431] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.829013] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 527.832954] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.755s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.833285] env[61898]: INFO nova.compute.claims [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 527.870728] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 528.343243] env[61898]: DEBUG nova.compute.utils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 528.347631] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 528.348022] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 528.459143] env[61898]: ERROR nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. 
[ 528.459143] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.459143] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.459143] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.459143] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.459143] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.459143] env[61898]: ERROR nova.compute.manager raise self.value [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.459143] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 528.459143] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.459143] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 528.459613] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.459613] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 528.459613] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. 
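[editor's note] The traceback above ends in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in this log), which raises PortBindingFailed for port d6cdf4d1-28f5-4d43-9339-5656f128329e after Neutron returns the updated port. As an illustrative sketch only, not the actual Nova source, the check amounts to inspecting the port's binding:vif_type field returned by the Neutron API and raising when it reports a failed binding; the class and constant names below are simplified stand-ins for the real ones.

# Hedged sketch (assumption: Neutron marks a failed binding with
# binding:vif_type == 'binding_failed'; names are illustrative, not Nova's).
class PortBindingFailed(Exception):
    """Raised when Neutron reports that a port's binding failed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value from Neutron

def _ensure_no_port_binding_failure(port):
    # 'port' is the dict returned by the Neutron API for the created/updated port.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

In the run recorded here, that exception propagates out of _allocate_network_async, the spawn of instance ceaef138-8b51-428b-9966-e6db3e7b0eb3 aborts, and the compute manager terminates the instance and deallocates its network, as the subsequent entries show.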
[ 528.459613] env[61898]: ERROR nova.compute.manager [ 528.459613] env[61898]: Traceback (most recent call last): [ 528.459613] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 528.459613] env[61898]: listener.cb(fileno) [ 528.459613] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.459613] env[61898]: result = function(*args, **kwargs) [ 528.459613] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 528.459613] env[61898]: return func(*args, **kwargs) [ 528.459613] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 528.459613] env[61898]: raise e [ 528.459613] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.459613] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 528.459613] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.459613] env[61898]: created_port_ids = self._update_ports_for_instance( [ 528.459613] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.459613] env[61898]: with excutils.save_and_reraise_exception(): [ 528.459613] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.459613] env[61898]: self.force_reraise() [ 528.459613] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.459613] env[61898]: raise self.value [ 528.459613] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.459613] env[61898]: updated_port = self._update_port( [ 528.459613] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.459613] env[61898]: _ensure_no_port_binding_failure(port) [ 528.459613] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.459613] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 528.462950] env[61898]: nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. [ 528.462950] env[61898]: Removing descriptor: 18 [ 528.462950] env[61898]: ERROR nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. 
[ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Traceback (most recent call last): [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] yield resources [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.driver.spawn(context, instance, image_meta, [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 528.462950] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] vm_ref = self.build_virtual_machine(instance, [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] vif_infos = vmwarevif.get_vif_info(self._session, [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] for vif in network_info: [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self._sync_wrapper(fn, *args, **kwargs) [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.wait() [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self[:] = self._gt.wait() [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self._exit_event.wait() [ 528.463773] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 528.464238] env[61898]: ERROR 
nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] result = hub.switch() [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self.greenlet.switch() [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] result = function(*args, **kwargs) [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return func(*args, **kwargs) [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise e [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] nwinfo = self.network_api.allocate_for_instance( [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 528.464238] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] created_port_ids = self._update_ports_for_instance( [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] with excutils.save_and_reraise_exception(): [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.force_reraise() [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise self.value [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] updated_port = self._update_port( [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 528.464585] 
env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] _ensure_no_port_binding_failure(port) [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 528.464585] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise exception.PortBindingFailed(port_id=port['id']) [ 528.464900] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. [ 528.464900] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] [ 528.464900] env[61898]: INFO nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Terminating instance [ 528.618760] env[61898]: DEBUG nova.policy [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1287bfdb68d540cd92544f1d1735a0ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b082832d275e4db183f3a7547f5a4ef1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 528.667873] env[61898]: DEBUG nova.compute.manager [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Received event network-changed-d6cdf4d1-28f5-4d43-9339-5656f128329e {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 528.668064] env[61898]: DEBUG nova.compute.manager [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Refreshing instance network info cache due to event network-changed-d6cdf4d1-28f5-4d43-9339-5656f128329e. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 528.668653] env[61898]: DEBUG oslo_concurrency.lockutils [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] Acquiring lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 528.668653] env[61898]: DEBUG oslo_concurrency.lockutils [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] Acquired lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 528.668653] env[61898]: DEBUG nova.network.neutron [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Refreshing network info cache for port d6cdf4d1-28f5-4d43-9339-5656f128329e {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 528.856726] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 528.968826] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquiring lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.118489] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f38d0bc-e3e2-48d1-b84b-8c0a16cd7cc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.127583] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ec69c6-042b-4723-adaf-ba5a0c41d2f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.163683] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f269ab4-909e-4821-b126-562e95076bf8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.171579] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124c1dc3-c231-4fc1-97fe-e6fac7e7d589 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.188033] env[61898]: DEBUG nova.compute.provider_tree [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.209075] env[61898]: DEBUG nova.network.neutron [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance 
cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 529.286158] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "7ef91986-fb46-478b-85a5-05d597790ad9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.286158] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "7ef91986-fb46-478b-85a5-05d597790ad9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.432145] env[61898]: DEBUG nova.network.neutron [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.455880] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Successfully created port: 69e1ee22-79a2-476a-93cf-869da323bd9b {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 529.691847] env[61898]: DEBUG nova.scheduler.client.report [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 529.872357] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 529.902048] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 529.902048] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 529.902048] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 529.902248] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 529.902248] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 529.902248] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 529.903674] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 529.903951] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 529.904165] 
env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 529.904335] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 529.904522] env[61898]: DEBUG nova.virt.hardware [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 529.905452] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cadedb-128a-41fb-aef9-459cdfa93f9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.916361] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284df3fa-5bc7-4cf5-8a45-1c3dd4cfab06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.935668] env[61898]: DEBUG oslo_concurrency.lockutils [req-cd2a7f71-6173-462d-8353-9192a0a0bc0e req-862f3171-6d2c-4d0e-9cbe-6ba3424a55a3 service nova] Releasing lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.936138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquired lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.936138] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 530.150634] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquiring lock "ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.150865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.199177] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.199504] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 530.204366] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.091s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.298651] env[61898]: DEBUG nova.compute.manager [None req-e51480a7-944c-4447-9971-e3ec599da5d8 tempest-ServerDiagnosticsV248Test-819306233 tempest-ServerDiagnosticsV248Test-819306233-project-admin] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 530.300862] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98539023-70cb-4bf2-92f6-e0d81ad4b9a8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.311969] env[61898]: INFO nova.compute.manager [None req-e51480a7-944c-4447-9971-e3ec599da5d8 tempest-ServerDiagnosticsV248Test-819306233 tempest-ServerDiagnosticsV248Test-819306233-project-admin] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Retrieving diagnostics [ 530.313075] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57c37d0-eed9-4d06-8660-7a6f41d8a111 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.500176] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 530.712402] env[61898]: DEBUG nova.compute.utils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 530.726527] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 530.727033] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 530.863266] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.918920] env[61898]: DEBUG nova.policy [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3aea1cd3ead47bfb2d41f88f6e4cd0f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7b75087a2cf44d65a2863fee47397ecf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 531.082161] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f8aa90-24d9-4efa-a859-7ebe176a6fa7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.094978] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463a2c8c-45d5-48fc-91ce-200be999f46b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.142720] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9052696-548b-4c33-830d-eec4dde3470d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.152110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcaf686-63f2-405a-87ba-aed68a474ea0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.167292] env[61898]: DEBUG nova.compute.provider_tree [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 531.227512] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 531.366772] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Releasing lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.367544] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 531.367544] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 531.367920] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d871ea40-6495-44b6-a7f2-8f6d922a899e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.383389] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3074f53e-b63d-4093-b1a9-d9e0ff239e23 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.414185] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ceaef138-8b51-428b-9966-e6db3e7b0eb3 could not be found. [ 531.414185] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 531.414185] env[61898]: INFO nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 531.414185] env[61898]: DEBUG oslo.service.loopingcall [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 531.414185] env[61898]: DEBUG nova.compute.manager [-] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 531.414185] env[61898]: DEBUG nova.network.neutron [-] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 531.449220] env[61898]: DEBUG nova.network.neutron [-] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 531.673404] env[61898]: DEBUG nova.scheduler.client.report [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 531.710854] env[61898]: DEBUG nova.compute.manager [req-cb5e73af-074a-4bd0-b980-b0efc64f8c31 req-7b35e7e9-c048-4c03-b501-e97312f2131e service nova] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Received event network-vif-deleted-d6cdf4d1-28f5-4d43-9339-5656f128329e {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 531.799417] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Successfully created port: d2e48e6a-4b40-4d6e-b811-06ff79636e7a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 531.952157] env[61898]: DEBUG nova.network.neutron [-] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.178925] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.974s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.179663] env[61898]: ERROR nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. 
[ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Traceback (most recent call last): [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.driver.spawn(context, instance, image_meta, [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self._vmops.spawn(context, instance, image_meta, injected_files, [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] vm_ref = self.build_virtual_machine(instance, [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] vif_infos = vmwarevif.get_vif_info(self._session, [ 532.179663] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] for vif in network_info: [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self._sync_wrapper(fn, *args, **kwargs) [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.wait() [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self[:] = self._gt.wait() [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self._exit_event.wait() [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] result = hub.switch() [ 532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
532.180025] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return self.greenlet.switch() [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] result = function(*args, **kwargs) [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] return func(*args, **kwargs) [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise e [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] nwinfo = self.network_api.allocate_for_instance( [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] created_port_ids = self._update_ports_for_instance( [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] with excutils.save_and_reraise_exception(): [ 532.180411] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] self.force_reraise() [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise self.value [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] updated_port = self._update_port( [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] _ensure_no_port_binding_failure(port) [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] raise exception.PortBindingFailed(port_id=port['id']) [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] nova.exception.PortBindingFailed: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. [ 532.180751] env[61898]: ERROR nova.compute.manager [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] [ 532.181057] env[61898]: DEBUG nova.compute.utils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 532.182384] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.954s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.194336] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Build of instance 052ff2f0-770a-4511-ae0c-e351ad987904 was re-scheduled: Binding failed for port 7e967d97-acf7-4035-8404-668980138d1a, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 532.194800] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 532.195065] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.195226] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquired lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.195404] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 532.238982] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 532.268215] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 532.268517] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 532.268780] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 532.268954] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 532.269014] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 532.269950] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 532.270239] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 532.270408] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 532.270654] env[61898]: DEBUG nova.virt.hardware [None 
req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 532.270771] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 532.270938] env[61898]: DEBUG nova.virt.hardware [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 532.271838] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add66a48-431d-45cb-806a-394442cbd02e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.281938] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f3c013-78f7-4cbf-bd43-171c0df89faf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.312052] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.312052] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.312052] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.312052] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.312255] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 
tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.314244] env[61898]: INFO nova.compute.manager [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Terminating instance [ 532.455291] env[61898]: INFO nova.compute.manager [-] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Took 1.04 seconds to deallocate network for instance. [ 532.459230] env[61898]: DEBUG nova.compute.claims [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 532.459230] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.751397] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 532.823128] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 532.823128] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquired lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 532.823128] env[61898]: DEBUG nova.network.neutron [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 532.924913] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.048303] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c875c0-1e2f-4c26-bd29-04860ebd8264 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.058681] env[61898]: ERROR nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. 
[ 533.058681] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 533.058681] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.058681] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.058681] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.058681] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.058681] env[61898]: ERROR nova.compute.manager raise self.value [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.058681] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 533.058681] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.058681] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 533.059174] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.059174] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 533.059174] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. 
[ 533.059174] env[61898]: ERROR nova.compute.manager [ 533.059174] env[61898]: Traceback (most recent call last): [ 533.059174] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 533.059174] env[61898]: listener.cb(fileno) [ 533.059174] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.059174] env[61898]: result = function(*args, **kwargs) [ 533.059174] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.059174] env[61898]: return func(*args, **kwargs) [ 533.059174] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 533.059174] env[61898]: raise e [ 533.059174] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 533.059174] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 533.059174] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.059174] env[61898]: created_port_ids = self._update_ports_for_instance( [ 533.059174] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.059174] env[61898]: with excutils.save_and_reraise_exception(): [ 533.059174] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.059174] env[61898]: self.force_reraise() [ 533.059174] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.059174] env[61898]: raise self.value [ 533.059174] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.059174] env[61898]: updated_port = self._update_port( [ 533.059174] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.059174] env[61898]: _ensure_no_port_binding_failure(port) [ 533.059174] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.059174] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 533.059944] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. [ 533.059944] env[61898]: Removing descriptor: 19 [ 533.059944] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c24abf0-257c-44ba-8910-575a54c4e7f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.063911] env[61898]: ERROR nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. 
[ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Traceback (most recent call last): [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] yield resources [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.driver.spawn(context, instance, image_meta, [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] vm_ref = self.build_virtual_machine(instance, [ 533.063911] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] for vif in network_info: [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self._sync_wrapper(fn, *args, **kwargs) [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.wait() [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self[:] = self._gt.wait() [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self._exit_event.wait() [ 533.064268] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.064268] env[61898]: ERROR 
nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] result = hub.switch() [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self.greenlet.switch() [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] result = function(*args, **kwargs) [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return func(*args, **kwargs) [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise e [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] nwinfo = self.network_api.allocate_for_instance( [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] created_port_ids = self._update_ports_for_instance( [ 533.064610] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] with excutils.save_and_reraise_exception(): [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.force_reraise() [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise self.value [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] updated_port = self._update_port( [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.064964] 
env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] _ensure_no_port_binding_failure(port) [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise exception.PortBindingFailed(port_id=port['id']) [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. [ 533.064964] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] [ 533.065307] env[61898]: INFO nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Terminating instance [ 533.105816] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc3532a-e4c8-4c88-a515-6d8013dbe59f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.114291] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07c1cd9-ce5a-4517-8b32-fafe99b9cfe8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.132236] env[61898]: DEBUG nova.compute.provider_tree [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.395729] env[61898]: DEBUG nova.network.neutron [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.429818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Releasing lock "refresh_cache-052ff2f0-770a-4511-ae0c-e351ad987904" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 533.430068] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 533.430931] env[61898]: DEBUG nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 533.430931] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 533.468175] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 533.570425] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquiring lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.570877] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquired lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.570877] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 533.578084] env[61898]: DEBUG nova.network.neutron [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.632954] env[61898]: DEBUG nova.scheduler.client.report [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 533.972153] env[61898]: DEBUG nova.compute.manager 
[req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Received event network-changed-69e1ee22-79a2-476a-93cf-869da323bd9b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 533.974817] env[61898]: DEBUG nova.compute.manager [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Refreshing instance network info cache due to event network-changed-69e1ee22-79a2-476a-93cf-869da323bd9b. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 533.974817] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] Acquiring lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.976440] env[61898]: DEBUG nova.network.neutron [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.083212] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Releasing lock "refresh_cache-8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.083212] env[61898]: DEBUG nova.compute.manager [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 534.083212] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.083212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0926cc78-902c-4d11-aed0-816601494a64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.093592] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 534.093695] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b0c4d30-2545-45c9-b742-fbe1b8a8e22d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.100139] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.107140] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 534.107140] env[61898]: value = "task-1240333" [ 534.107140] env[61898]: _type = "Task" [ 534.107140] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.118757] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240333, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.139359] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.957s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.140782] env[61898]: ERROR nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. 
[ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Traceback (most recent call last): [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.driver.spawn(context, instance, image_meta, [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] vm_ref = self.build_virtual_machine(instance, [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.140782] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] for vif in network_info: [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self._sync_wrapper(fn, *args, **kwargs) [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.wait() [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self[:] = self._gt.wait() [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self._exit_event.wait() [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] result = hub.switch() [ 534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
534.141128] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return self.greenlet.switch() [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] result = function(*args, **kwargs) [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] return func(*args, **kwargs) [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise e [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] nwinfo = self.network_api.allocate_for_instance( [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] created_port_ids = self._update_ports_for_instance( [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] with excutils.save_and_reraise_exception(): [ 534.141480] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] self.force_reraise() [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise self.value [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] updated_port = self._update_port( [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] _ensure_no_port_binding_failure(port) [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] raise exception.PortBindingFailed(port_id=port['id']) [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] nova.exception.PortBindingFailed: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. [ 534.141812] env[61898]: ERROR nova.compute.manager [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] [ 534.145065] env[61898]: DEBUG nova.compute.utils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 534.146579] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.784s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.151121] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Build of instance 4c02ebbd-345f-4253-bc28-f90c731c78aa was re-scheduled: Binding failed for port 3e7d29cb-87e5-4b43-9c8d-a6ffc3ed601f, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 534.152342] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 534.154089] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.154089] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquired lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.154089] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 534.277634] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.476226] env[61898]: ERROR nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. 
[ 534.476226] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 534.476226] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.476226] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.476226] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.476226] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.476226] env[61898]: ERROR nova.compute.manager raise self.value [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.476226] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.476226] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.476226] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.476673] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.476673] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.476673] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. 
[ 534.476673] env[61898]: ERROR nova.compute.manager [ 534.476673] env[61898]: Traceback (most recent call last): [ 534.476673] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.476673] env[61898]: listener.cb(fileno) [ 534.476673] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.476673] env[61898]: result = function(*args, **kwargs) [ 534.476673] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.476673] env[61898]: return func(*args, **kwargs) [ 534.476673] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 534.476673] env[61898]: raise e [ 534.476673] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 534.476673] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 534.476673] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.476673] env[61898]: created_port_ids = self._update_ports_for_instance( [ 534.476673] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.476673] env[61898]: with excutils.save_and_reraise_exception(): [ 534.476673] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.476673] env[61898]: self.force_reraise() [ 534.476673] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.476673] env[61898]: raise self.value [ 534.476673] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.476673] env[61898]: updated_port = self._update_port( [ 534.476673] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.476673] env[61898]: _ensure_no_port_binding_failure(port) [ 534.476673] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.476673] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.478136] env[61898]: nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. [ 534.478136] env[61898]: Removing descriptor: 20 [ 534.478136] env[61898]: ERROR nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. 
[ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Traceback (most recent call last): [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] yield resources [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.driver.spawn(context, instance, image_meta, [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.478136] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] vm_ref = self.build_virtual_machine(instance, [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] for vif in network_info: [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self._sync_wrapper(fn, *args, **kwargs) [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.wait() [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self[:] = self._gt.wait() [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self._exit_event.wait() [ 534.478509] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.478970] env[61898]: ERROR 
nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] result = hub.switch() [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self.greenlet.switch() [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] result = function(*args, **kwargs) [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return func(*args, **kwargs) [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise e [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] nwinfo = self.network_api.allocate_for_instance( [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 534.478970] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] created_port_ids = self._update_ports_for_instance( [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] with excutils.save_and_reraise_exception(): [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.force_reraise() [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise self.value [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] updated_port = self._update_port( [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.479354] 
env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] _ensure_no_port_binding_failure(port) [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.479354] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise exception.PortBindingFailed(port_id=port['id']) [ 534.479660] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. [ 534.479660] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] [ 534.479660] env[61898]: INFO nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Terminating instance [ 534.483133] env[61898]: INFO nova.compute.manager [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] Took 1.05 seconds to deallocate network for instance. [ 534.623844] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240333, 'name': PowerOffVM_Task, 'duration_secs': 0.114036} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.624190] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 534.624410] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 534.624676] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d43aeeb-a6bc-4b92-90ee-4cf56fa59902 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.649388] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 534.649774] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 534.650008] 
env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleting the datastore file [datastore2] 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 534.650282] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fcdbc92-45af-4b41-a599-7c2d8e6cd19b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.661744] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for the task: (returnval){ [ 534.661744] env[61898]: value = "task-1240335" [ 534.661744] env[61898]: _type = "Task" [ 534.661744] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.670447] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240335, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.692994] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.781061] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Releasing lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 534.781750] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 534.781973] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 534.785203] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] Acquired lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.785398] env[61898]: DEBUG nova.network.neutron [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Refreshing network info cache for port 69e1ee22-79a2-476a-93cf-869da323bd9b {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 534.786455] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82eb2093-5c80-45d4-91d4-952cc7e94805 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.796441] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df611fc-15f8-42df-95c8-290f6aa0056e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.822991] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e5a01e2-67ba-4832-815f-34767deba62f could not be found. [ 534.823248] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 534.823439] env[61898]: INFO nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 534.823686] env[61898]: DEBUG oslo.service.loopingcall [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 534.826161] env[61898]: DEBUG nova.compute.manager [-] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 534.826265] env[61898]: DEBUG nova.network.neutron [-] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 534.905613] env[61898]: DEBUG nova.network.neutron [-] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 534.957883] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.990785] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquiring lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.991140] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquired lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 534.991140] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 534.992763] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1d9515c-605f-4cf3-88c3-f319d4e35d96 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.002235] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167684c0-95c0-430a-a516-d090200e597b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.041510] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caffc9d9-7a7c-4eca-ae0e-4b6d306157a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.050107] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c981055-3cf9-4071-a689-e13fd830b4af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.065891] env[61898]: DEBUG nova.compute.provider_tree [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 
tempest-ServerActionsTestJSON-1786679694-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.185758] env[61898]: DEBUG oslo_vmware.api [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Task: {'id': task-1240335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091692} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.186211] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 535.186374] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 535.186629] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.186948] env[61898]: INFO nova.compute.manager [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 535.188109] env[61898]: DEBUG oslo.service.loopingcall [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 535.188882] env[61898]: DEBUG nova.compute.manager [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 535.188882] env[61898]: DEBUG nova.network.neutron [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.213663] env[61898]: DEBUG nova.network.neutron [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.325621] env[61898]: DEBUG nova.network.neutron [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.411336] env[61898]: DEBUG nova.network.neutron [-] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.438984] env[61898]: DEBUG nova.compute.manager [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Received event network-changed-d2e48e6a-4b40-4d6e-b811-06ff79636e7a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 535.439195] env[61898]: DEBUG nova.compute.manager [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Refreshing instance network info cache due to event network-changed-d2e48e6a-4b40-4d6e-b811-06ff79636e7a. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 535.439407] env[61898]: DEBUG oslo_concurrency.lockutils [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] Acquiring lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.460827] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Releasing lock "refresh_cache-4c02ebbd-345f-4253-bc28-f90c731c78aa" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.461029] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 535.461205] env[61898]: DEBUG nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 535.462039] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 535.477137] env[61898]: DEBUG nova.network.neutron [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.486040] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.518632] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 535.537245] env[61898]: INFO nova.scheduler.client.report [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Deleted allocations for instance 052ff2f0-770a-4511-ae0c-e351ad987904 [ 535.572116] env[61898]: DEBUG nova.scheduler.client.report [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 535.614331] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.714504] env[61898]: DEBUG nova.network.neutron [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.842367] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "d74776d1-f374-4761-976c-f073b3821f42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.842790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "d74776d1-f374-4761-976c-f073b3821f42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.918454] env[61898]: INFO nova.compute.manager [-] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Took 1.09 seconds to deallocate network for instance. 
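For reference, the inventory payload reported above for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 maps onto schedulable capacity via the usual Placement convention, capacity = (total - reserved) * allocation_ratio. The snippet below is an illustrative sketch only (the usable_capacity helper is not Nova or Placement code), plugging in the values from the log:

```python
# Illustrative sketch: derive usable capacity from the inventory dict
# reported above, following the Placement convention
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def usable_capacity(inv):
    """Return {resource class: schedulable capacity} for one provider."""
    return {
        rc: (data['total'] - data['reserved']) * data['allocation_ratio']
        for rc, data in inv.items()
    }


print(usable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```

With an allocation_ratio of 4.0, the 48 physical VCPUs advertise 192 schedulable units to the scheduler, even while individual builds on this node are failing with PortBindingFailed and being re-scheduled.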
[ 535.921021] env[61898]: DEBUG nova.compute.claims [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 535.921214] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.980543] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c5cd73a-5c9e-4e4b-9081-c46b42f59117 req-fa98041e-0d2a-4067-90f2-1a2f77f22c36 service nova] Releasing lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.987685] env[61898]: DEBUG nova.network.neutron [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.050874] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4528fbce-92fd-43f3-b94f-e949070ff2cb tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "052ff2f0-770a-4511-ae0c-e351ad987904" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.228s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.053254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "052ff2f0-770a-4511-ae0c-e351ad987904" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.447s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.053254] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 052ff2f0-770a-4511-ae0c-e351ad987904] During sync_power_state the instance has a pending task (spawning). Skip. 
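The recurring Acquiring lock "compute_resources" / acquired ... waited / released ... held DEBUG triplets in this log are emitted by oslo.concurrency's lockutils wrapper around whatever function holds the lock. A minimal standalone sketch, assuming only oslo.concurrency and standard logging (the abort_instance_claim body here is a placeholder, not Nova's resource tracker), reproduces the same messages:

```python
# Minimal sketch: oslo.concurrency's synchronized decorator emits the
# 'Acquiring lock "..." by "..."', 'Lock "..." acquired ... :: waited Ns'
# and 'Lock "..." "released" ... :: held Ns' DEBUG lines seen in this log.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Placeholder body; in Nova the work done under this lock name is the
    # rollback of resources claimed for the failed instance.
    LOG.info('aborting claim for %s', instance_uuid)


abort_instance_claim('ba29c234-4f7b-414c-9b6b-6a2fa68e9533')
```

Because every claim, abort and cleanup serializes on the single "compute_resources" name, queueing shows up directly in the log as wait times such as the 13.784s recorded at 534.146579 above.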
[ 536.053254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "052ff2f0-770a-4511-ae0c-e351ad987904" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.089020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.943s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.089508] env[61898]: ERROR nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Traceback (most recent call last): [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.driver.spawn(context, instance, image_meta, [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] vm_ref = self.build_virtual_machine(instance, [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.089508] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] for vif in network_info: [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self._sync_wrapper(fn, *args, **kwargs) [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.090119] 
env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.wait() [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self[:] = self._gt.wait() [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self._exit_event.wait() [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] result = hub.switch() [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.090119] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return self.greenlet.switch() [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] result = function(*args, **kwargs) [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] return func(*args, **kwargs) [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise e [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] nwinfo = self.network_api.allocate_for_instance( [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] created_port_ids = self._update_ports_for_instance( [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] with excutils.save_and_reraise_exception(): [ 536.090538] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.090981] 
env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] self.force_reraise() [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise self.value [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] updated_port = self._update_port( [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] _ensure_no_port_binding_failure(port) [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] raise exception.PortBindingFailed(port_id=port['id']) [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] nova.exception.PortBindingFailed: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. [ 536.090981] env[61898]: ERROR nova.compute.manager [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] [ 536.091317] env[61898]: DEBUG nova.compute.utils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 536.091786] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.788s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.093694] env[61898]: INFO nova.compute.claims [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.097447] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Build of instance ba29c234-4f7b-414c-9b6b-6a2fa68e9533 was re-scheduled: Binding failed for port 12ad4367-208a-4766-a8c3-418b18cb8e66, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 536.097447] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 536.097447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquiring lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.097447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Acquired lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.097720] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 536.116785] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Releasing lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.117317] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 536.117603] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 536.118057] env[61898]: DEBUG oslo_concurrency.lockutils [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] Acquired lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.118435] env[61898]: DEBUG nova.network.neutron [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Refreshing network info cache for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 536.119741] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c67751c-ba79-4eb8-b699-7adec9000025 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.142019] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fff380f-fd5f-429d-8adb-cc5bd085314a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.173089] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d11fc94-b63a-475c-bcb3-f212c838668c could not be found. [ 536.173480] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 536.173692] env[61898]: INFO nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Took 0.06 seconds to destroy the instance on the hypervisor. [ 536.174056] env[61898]: DEBUG oslo.service.loopingcall [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 536.174563] env[61898]: DEBUG nova.compute.manager [-] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 536.174641] env[61898]: DEBUG nova.network.neutron [-] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 536.219189] env[61898]: INFO nova.compute.manager [-] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Took 1.03 seconds to deallocate network for instance. [ 536.220238] env[61898]: DEBUG nova.network.neutron [-] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.492450] env[61898]: INFO nova.compute.manager [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] Took 1.03 seconds to deallocate network for instance. [ 536.561582] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 536.627583] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.659925] env[61898]: DEBUG nova.network.neutron [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 536.723642] env[61898]: DEBUG nova.network.neutron [-] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.729882] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.760278] env[61898]: DEBUG nova.network.neutron [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.852901] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.884202] env[61898]: DEBUG oslo_concurrency.lockutils [None req-16a04361-20bc-4a4e-8787-95c7d2ce0017 tempest-ServersListShow296Test-1560153890 tempest-ServersListShow296Test-1560153890-project-member] Acquiring lock "4357edb7-fac0-4ad2-9746-4a27de976fdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.884491] env[61898]: DEBUG oslo_concurrency.lockutils [None req-16a04361-20bc-4a4e-8787-95c7d2ce0017 tempest-ServersListShow296Test-1560153890 tempest-ServersListShow296Test-1560153890-project-member] Lock "4357edb7-fac0-4ad2-9746-4a27de976fdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.985192] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquiring lock "f2f968db-d4e2-451d-afe6-330196eba6c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.985939] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "f2f968db-d4e2-451d-afe6-330196eba6c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.026272] env[61898]: DEBUG nova.compute.manager [req-0df8d56f-b627-496b-9c4b-05c1e1b0ad58 req-dd6ed105-1715-443a-bf82-caf4609acde0 service nova] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] 
Received event network-vif-deleted-69e1ee22-79a2-476a-93cf-869da323bd9b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 537.091061] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.230332] env[61898]: INFO nova.compute.manager [-] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Took 1.06 seconds to deallocate network for instance. [ 537.235177] env[61898]: DEBUG nova.compute.claims [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 537.235418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.268030] env[61898]: DEBUG oslo_concurrency.lockutils [req-b3a6c6bc-7bcc-47e9-92df-cfa1c4675b5c req-924d8457-6fae-4849-83c7-01517f7d5e34 service nova] Releasing lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.292391] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "69ad75e8-dcfc-499a-8f18-bf38575968be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.292681] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "69ad75e8-dcfc-499a-8f18-bf38575968be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.359083] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Releasing lock "refresh_cache-ba29c234-4f7b-414c-9b6b-6a2fa68e9533" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.359378] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 537.359505] env[61898]: DEBUG nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 537.359650] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 537.399126] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 537.443502] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5795d8df-c7ac-4f38-aedf-93c9cb13fa8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.454795] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857c6246-95c0-4517-9d29-27d1c7c6d309 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.487130] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8d5ff4-3bc1-4d5c-b4ec-42ed27e552f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.496023] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd2b4da-610f-458b-a135-4cb8c5f87041 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.510331] env[61898]: DEBUG nova.compute.provider_tree [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.531404] env[61898]: INFO nova.scheduler.client.report [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Deleted allocations for instance 4c02ebbd-345f-4253-bc28-f90c731c78aa [ 537.906583] env[61898]: DEBUG nova.network.neutron [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.013338] env[61898]: DEBUG nova.scheduler.client.report [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Inventory has not 
changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 538.039936] env[61898]: DEBUG oslo_concurrency.lockutils [None req-882f2f0c-ee63-4635-b483-3a1e12ac4077 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.810s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.041213] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.435s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.041408] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 4c02ebbd-345f-4253-bc28-f90c731c78aa] During sync_power_state the instance has a pending task (spawning). Skip. [ 538.041580] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "4c02ebbd-345f-4253-bc28-f90c731c78aa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.410321] env[61898]: INFO nova.compute.manager [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] Took 1.05 seconds to deallocate network for instance. [ 538.518103] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.518888] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 538.524311] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.808s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.545629] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 538.886762] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "759d1958-0518-4654-8686-38be0920c29f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.889826] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "759d1958-0518-4654-8686-38be0920c29f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.920013] env[61898]: DEBUG nova.compute.manager [req-aab77dec-e8ae-4c7a-870f-1467b303c32b req-a2a5744d-9e6f-4637-a88a-ed553d12ddf4 service nova] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Received event network-vif-deleted-d2e48e6a-4b40-4d6e-b811-06ff79636e7a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 539.032702] env[61898]: DEBUG nova.compute.utils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 539.034185] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 539.034349] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 539.075045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.122114] env[61898]: DEBUG nova.policy [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75b3ee14d6d14861ae7eb78edf76e98f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d6d05e9212f4413af795c1b8b809427', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 539.297746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "cfb2f64b-7026-444d-8f86-500445343ac1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.298048] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "cfb2f64b-7026-444d-8f86-500445343ac1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.434102] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ee69af-8ba4-4f0e-b900-2990b88cbcce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.442473] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51de1746-bead-442a-821b-b01675acd049 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.478075] env[61898]: INFO nova.scheduler.client.report [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Deleted allocations for instance ba29c234-4f7b-414c-9b6b-6a2fa68e9533 [ 539.488149] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c7a529-397c-4482-9057-60e5d464e2b2 {{(pid=61898) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.499382] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41de593f-beb3-44b7-bdfa-f5c373dcf655 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.515189] env[61898]: DEBUG nova.compute.provider_tree [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.536614] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 539.642744] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Successfully created port: 6bc5168d-bdb4-4084-89c9-820c06cbff4f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.997775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6daad30c-27f9-4457-af4d-c75a8c560d1f tempest-ServerActionsTestJSON-1786679694 tempest-ServerActionsTestJSON-1786679694-project-member] Lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.066s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.997775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.391s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.997775] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: ba29c234-4f7b-414c-9b6b-6a2fa68e9533] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 539.997775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "ba29c234-4f7b-414c-9b6b-6a2fa68e9533" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.017949] env[61898]: DEBUG nova.scheduler.client.report [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 540.500317] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 540.529521] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.004s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.529521] env[61898]: ERROR nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. 
[ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Traceback (most recent call last): [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.driver.spawn(context, instance, image_meta, [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.529521] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] vm_ref = self.build_virtual_machine(instance, [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] for vif in network_info: [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return self._sync_wrapper(fn, *args, **kwargs) [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.wait() [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self[:] = self._gt.wait() [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return self._exit_event.wait() [ 540.529850] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] current.throw(*self._exc) [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] result = function(*args, **kwargs) [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] return func(*args, **kwargs) [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise e [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] nwinfo = self.network_api.allocate_for_instance( [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] created_port_ids = self._update_ports_for_instance( [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 540.530199] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] with excutils.save_and_reraise_exception(): [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] self.force_reraise() [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise self.value [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] updated_port = self._update_port( [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] _ensure_no_port_binding_failure(port) [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] raise exception.PortBindingFailed(port_id=port['id']) [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] nova.exception.PortBindingFailed: Binding failed for 
port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. [ 540.530554] env[61898]: ERROR nova.compute.manager [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] [ 540.530900] env[61898]: DEBUG nova.compute.utils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 540.530900] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.781s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.533102] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Build of instance 57f77a5f-87d0-4a9a-80ea-4b24baf33d02 was re-scheduled: Binding failed for port 82e047c2-8942-448f-b74f-5813bc4551fc, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 540.534078] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 540.534078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquiring lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.534188] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Acquired lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.534279] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 540.546447] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 540.590293] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.590556] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.590709] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.590891] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.591064] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.591224] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.591426] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.591591] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 540.591752] env[61898]: DEBUG 
nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.591919] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.592100] env[61898]: DEBUG nova.virt.hardware [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.593200] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cf5c8c-30fc-4bc3-afea-427e39149419 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.602880] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0514bee3-9afe-4075-9add-927facc6aa9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.022842] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.057655] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.163219] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.171812] env[61898]: ERROR nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. 
[ 541.171812] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.171812] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.171812] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.171812] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.171812] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.171812] env[61898]: ERROR nova.compute.manager raise self.value [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.171812] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 541.171812] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.171812] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 541.172323] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.172323] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 541.172323] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. 
[ 541.172323] env[61898]: ERROR nova.compute.manager [ 541.172323] env[61898]: Traceback (most recent call last): [ 541.172323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 541.172323] env[61898]: listener.cb(fileno) [ 541.172323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.172323] env[61898]: result = function(*args, **kwargs) [ 541.172323] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.172323] env[61898]: return func(*args, **kwargs) [ 541.172323] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.172323] env[61898]: raise e [ 541.172323] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.172323] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 541.172323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.172323] env[61898]: created_port_ids = self._update_ports_for_instance( [ 541.172323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.172323] env[61898]: with excutils.save_and_reraise_exception(): [ 541.172323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.172323] env[61898]: self.force_reraise() [ 541.172323] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.172323] env[61898]: raise self.value [ 541.172323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.172323] env[61898]: updated_port = self._update_port( [ 541.172323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.172323] env[61898]: _ensure_no_port_binding_failure(port) [ 541.172323] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.172323] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 541.176899] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. [ 541.176899] env[61898]: Removing descriptor: 19 [ 541.176899] env[61898]: ERROR nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. 
[ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Traceback (most recent call last): [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] yield resources [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.driver.spawn(context, instance, image_meta, [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 541.176899] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] vm_ref = self.build_virtual_machine(instance, [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] for vif in network_info: [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self._sync_wrapper(fn, *args, **kwargs) [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.wait() [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self[:] = self._gt.wait() [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self._exit_event.wait() [ 541.177368] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 541.177858] env[61898]: ERROR 
nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] result = hub.switch() [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self.greenlet.switch() [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] result = function(*args, **kwargs) [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return func(*args, **kwargs) [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise e [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] nwinfo = self.network_api.allocate_for_instance( [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 541.177858] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] created_port_ids = self._update_ports_for_instance( [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] with excutils.save_and_reraise_exception(): [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.force_reraise() [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise self.value [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] updated_port = self._update_port( [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 541.178267] 
env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] _ensure_no_port_binding_failure(port) [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 541.178267] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise exception.PortBindingFailed(port_id=port['id']) [ 541.178618] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. [ 541.178618] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] [ 541.178618] env[61898]: INFO nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Terminating instance [ 541.360475] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045b1b7f-3783-4f3e-bed4-bf938cf4e93a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.366769] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9105b0-cfdf-4a45-a812-a9f1ac1c436c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.399894] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0523b0e2-9ef2-40ae-be95-120d29b802b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.407830] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b4b8f7-8b88-4c5b-8337-b6b17d18698c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.422474] env[61898]: DEBUG nova.compute.provider_tree [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 541.667673] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Releasing lock "refresh_cache-57f77a5f-87d0-4a9a-80ea-4b24baf33d02" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.667999] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 541.668159] env[61898]: DEBUG nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 541.668323] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 541.680352] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquiring lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.682555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquired lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.682555] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 541.686217] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 541.844007] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquiring lock "f968f3df-c70b-466b-8aaa-879354f12d3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.844241] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "f968f3df-c70b-466b-8aaa-879354f12d3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.850269] env[61898]: DEBUG nova.compute.manager [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Received event network-changed-6bc5168d-bdb4-4084-89c9-820c06cbff4f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 541.850269] env[61898]: DEBUG nova.compute.manager [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Refreshing instance network info cache due to event network-changed-6bc5168d-bdb4-4084-89c9-820c06cbff4f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 541.850433] env[61898]: DEBUG oslo_concurrency.lockutils [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] Acquiring lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.929181] env[61898]: DEBUG nova.scheduler.client.report [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 541.979266] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquiring lock "1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.979543] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 
tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.188100] env[61898]: DEBUG nova.network.neutron [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.208489] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.348122] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.439595] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.910s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.444481] env[61898]: ERROR nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. 
[ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Traceback (most recent call last): [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.driver.spawn(context, instance, image_meta, [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] vm_ref = self.build_virtual_machine(instance, [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] vif_infos = vmwarevif.get_vif_info(self._session, [ 542.444481] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] for vif in network_info: [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self._sync_wrapper(fn, *args, **kwargs) [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.wait() [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self[:] = self._gt.wait() [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self._exit_event.wait() [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] result = hub.switch() [ 542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
542.444801] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return self.greenlet.switch() [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] result = function(*args, **kwargs) [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] return func(*args, **kwargs) [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise e [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] nwinfo = self.network_api.allocate_for_instance( [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] created_port_ids = self._update_ports_for_instance( [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] with excutils.save_and_reraise_exception(): [ 542.445217] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] self.force_reraise() [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise self.value [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] updated_port = self._update_port( [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] _ensure_no_port_binding_failure(port) [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] raise exception.PortBindingFailed(port_id=port['id']) [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] nova.exception.PortBindingFailed: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. [ 542.445667] env[61898]: ERROR nova.compute.manager [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] [ 542.446107] env[61898]: DEBUG nova.compute.utils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 542.446107] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.322s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.446107] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.446107] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 542.446107] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.576s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.447755] env[61898]: INFO nova.compute.claims [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 542.450890] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Build of instance 8c006d14-dbd9-40b9-a474-06382fea3b2f was re-scheduled: Binding failed for port 46220313-cfd2-4aa5-9379-894cdd6a4ccf, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 542.451463] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 542.451711] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquiring lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 542.451861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Acquired lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.452031] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 542.453946] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dea222b-f8c5-456a-b130-b9b69cbfb406 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.463743] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483e7a67-acce-4171-b9ff-075a84b3f18c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.478783] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cd54a4-7b17-4d4b-97de-bb6c60240605 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.485642] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cad65c-3b22-4272-95fa-db42138d4044 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.518678] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181487MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 542.518926] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.691762] env[61898]: INFO nova.compute.manager [None req-ed798189-7fed-4c15-8822-e4502e0267fe 
tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] Took 1.02 seconds to deallocate network for instance. [ 542.855513] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Releasing lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.855999] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 542.856232] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 542.856548] env[61898]: DEBUG oslo_concurrency.lockutils [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] Acquired lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 542.856714] env[61898]: DEBUG nova.network.neutron [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Refreshing network info cache for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 542.857920] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ad5ee88-8f8d-4ae4-80cf-0968defc0b28 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.868789] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a228351-526e-44b3-acf5-ef6817d52c16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.890195] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4b39d9ad-b7d3-4464-b9e0-799440b445e4 could not be found. 
[ 542.890835] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 542.890835] env[61898]: INFO nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Took 0.03 seconds to destroy the instance on the hypervisor. [ 542.890835] env[61898]: DEBUG oslo.service.loopingcall [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 542.891172] env[61898]: DEBUG nova.compute.manager [-] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 542.891239] env[61898]: DEBUG nova.network.neutron [-] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 542.936123] env[61898]: DEBUG nova.network.neutron [-] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 542.994131] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.069789] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.440052] env[61898]: DEBUG nova.network.neutron [-] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.544956] env[61898]: DEBUG nova.network.neutron [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.572183] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Releasing lock "refresh_cache-8c006d14-dbd9-40b9-a474-06382fea3b2f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 543.572575] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 543.572874] env[61898]: DEBUG nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 543.573342] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 543.615679] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 543.722792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "1f7b6f74-24c1-4db1-9f70-350f307a07b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.723070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "1f7b6f74-24c1-4db1-9f70-350f307a07b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.745108] env[61898]: DEBUG nova.network.neutron [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 543.749270] env[61898]: INFO nova.scheduler.client.report [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Deleted allocations for instance 57f77a5f-87d0-4a9a-80ea-4b24baf33d02 [ 543.872358] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fba5c79-19bf-4218-b101-112ab554d195 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.881229] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7413e708-bdf6-4217-973b-b35ef92250ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.917838] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49344a8-55de-4728-b187-18c8d8a2565e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.925605] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f026c543-6a9b-434f-bfdf-b60caf974fbe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.939488] env[61898]: DEBUG nova.compute.provider_tree [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.944273] env[61898]: INFO nova.compute.manager [-] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Took 1.05 seconds to deallocate network for instance. 
[ 543.944814] env[61898]: DEBUG nova.compute.claims [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 543.944920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.118155] env[61898]: DEBUG nova.network.neutron [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.258662] env[61898]: DEBUG oslo_concurrency.lockutils [req-05eee3c4-3492-4a45-a610-2ab5e6a1ccb5 req-5fdcbf7a-821c-4975-8265-2543fd5bf6f5 service nova] Releasing lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.265189] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ed798189-7fed-4c15-8822-e4502e0267fe tempest-ImagesNegativeTestJSON-595136820 tempest-ImagesNegativeTestJSON-595136820-project-member] Lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.831s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.265560] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.659s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.265772] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 57f77a5f-87d0-4a9a-80ea-4b24baf33d02] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 544.265937] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "57f77a5f-87d0-4a9a-80ea-4b24baf33d02" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.300531] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.301120] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.403557] env[61898]: DEBUG nova.compute.manager [req-eb05d905-d81d-4830-9522-d0906fb7ae17 req-36a85c74-1d0a-44ba-bbc5-13aaac296cc4 service nova] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Received event network-vif-deleted-6bc5168d-bdb4-4084-89c9-820c06cbff4f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 544.444258] env[61898]: DEBUG nova.scheduler.client.report [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 544.625546] env[61898]: INFO nova.compute.manager [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] Took 1.05 seconds to deallocate network for instance. [ 544.771637] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 544.952347] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.506s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.956416] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 544.961677] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.503s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.299753] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.463699] env[61898]: DEBUG nova.compute.utils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.465682] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 545.465682] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 545.658076] env[61898]: DEBUG nova.policy [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 545.670782] env[61898]: INFO nova.scheduler.client.report [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Deleted allocations for instance 8c006d14-dbd9-40b9-a474-06382fea3b2f [ 545.884441] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6854c69d-fca5-48cf-8fc4-b41fa17f0532 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.893230] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0ac174-675c-48a5-bc69-cfc240a4231f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.927135] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35dea16-0f48-47dc-bae9-02abb6d61718 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.935505] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52eab56-c5d2-4657-9361-c0386dedf12d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.951321] env[61898]: DEBUG nova.compute.provider_tree [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 545.968381] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 546.182378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-70e91236-1170-43bb-951d-e9202ed69f24 tempest-ServerDiagnosticsNegativeTest-1372188627 tempest-ServerDiagnosticsNegativeTest-1372188627-project-member] Lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.843s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.183847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.577s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.184253] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8c006d14-dbd9-40b9-a474-06382fea3b2f] During sync_power_state the instance has a pending task (spawning). Skip. [ 546.184253] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8c006d14-dbd9-40b9-a474-06382fea3b2f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.456725] env[61898]: DEBUG nova.scheduler.client.report [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 546.509170] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Successfully created port: 6398b5f8-2603-4da2-b3ce-44653a7617a8 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.688942] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 546.967867] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.005s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 546.967867] env[61898]: ERROR nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Traceback (most recent call last): [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.driver.spawn(context, instance, image_meta, [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 546.967867] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] vm_ref = self.build_virtual_machine(instance, [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] vif_infos = vmwarevif.get_vif_info(self._session, [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] for vif in network_info: [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self._sync_wrapper(fn, *args, **kwargs) [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.wait() [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 546.968776] 
env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self[:] = self._gt.wait() [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self._exit_event.wait() [ 546.968776] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] result = hub.switch() [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return self.greenlet.switch() [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] result = function(*args, **kwargs) [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] return func(*args, **kwargs) [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise e [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] nwinfo = self.network_api.allocate_for_instance( [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 546.969414] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] created_port_ids = self._update_ports_for_instance( [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] with excutils.save_and_reraise_exception(): [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] self.force_reraise() [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise self.value [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] updated_port = self._update_port( [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] _ensure_no_port_binding_failure(port) [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 546.970260] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] raise exception.PortBindingFailed(port_id=port['id']) [ 546.970633] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] nova.exception.PortBindingFailed: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. [ 546.970633] env[61898]: ERROR nova.compute.manager [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] [ 546.970633] env[61898]: DEBUG nova.compute.utils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 546.970633] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.049s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 546.975935] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Build of instance ceaef138-8b51-428b-9966-e6db3e7b0eb3 was re-scheduled: Binding failed for port d6cdf4d1-28f5-4d43-9339-5656f128329e, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 546.975935] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 546.975935] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquiring lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.975935] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Acquired lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.976153] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 546.981481] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 547.012207] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.012461] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.012620] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.012798] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.012938] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.013093] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.013380] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.013447] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 547.013604] env[61898]: DEBUG nova.virt.hardware [None 
req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.014405] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.014405] env[61898]: DEBUG nova.virt.hardware [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.015126] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d26f58-f502-4a79-94b1-1583ac146965 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.027222] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc56c951-3a71-479d-8913-b4dd1cae3e59 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.218151] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.518032] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 547.773386] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.859929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf93df5-fda2-4cc0-a1a0-ff543b2ebd0d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.869130] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5efaed-f069-4465-923c-6947bd7ce0db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.900385] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ee1ec1-93a2-497f-b60c-fa676e0d0822 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.908258] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbea532-116b-4355-af08-d458183b1ff5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.923356] env[61898]: DEBUG nova.compute.provider_tree [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.278794] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Releasing lock "refresh_cache-ceaef138-8b51-428b-9966-e6db3e7b0eb3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.279112] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 548.279294] env[61898]: DEBUG nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 548.279815] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 548.325987] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 548.426170] env[61898]: DEBUG nova.scheduler.client.report [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 548.833925] env[61898]: DEBUG nova.network.neutron [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.932483] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.933215] env[61898]: ERROR nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. 
[ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Traceback (most recent call last): [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.driver.spawn(context, instance, image_meta, [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] vm_ref = self.build_virtual_machine(instance, [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.933215] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] for vif in network_info: [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self._sync_wrapper(fn, *args, **kwargs) [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.wait() [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self[:] = self._gt.wait() [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self._exit_event.wait() [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] result = hub.switch() [ 548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
548.933622] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return self.greenlet.switch() [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] result = function(*args, **kwargs) [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] return func(*args, **kwargs) [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise e [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] nwinfo = self.network_api.allocate_for_instance( [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] created_port_ids = self._update_ports_for_instance( [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] with excutils.save_and_reraise_exception(): [ 548.934030] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] self.force_reraise() [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise self.value [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] updated_port = self._update_port( [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] _ensure_no_port_binding_failure(port) [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] raise exception.PortBindingFailed(port_id=port['id']) [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] nova.exception.PortBindingFailed: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. [ 548.934437] env[61898]: ERROR nova.compute.manager [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] [ 548.935030] env[61898]: DEBUG nova.compute.utils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 548.935678] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Build of instance 8e5a01e2-67ba-4832-815f-34767deba62f was re-scheduled: Binding failed for port 69e1ee22-79a2-476a-93cf-869da323bd9b, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 548.936115] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 548.936351] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquiring lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.936495] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Acquired lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.936902] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 548.937683] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.208s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.937893] env[61898]: DEBUG nova.objects.instance [None req-ef5d734d-8be7-47ba-bf93-56172161540b 
tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lazy-loading 'resources' on Instance uuid 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 549.336501] env[61898]: INFO nova.compute.manager [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] Took 1.06 seconds to deallocate network for instance. [ 549.509593] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 549.779834] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.833023] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8562b3ed-59bc-40ce-a202-8c2eff9a1c89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.839668] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054977fe-8c74-4c78-8755-bb27865960e0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.878362] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987cf015-c5a5-4635-8b3d-1b95bac82c5e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.886909] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7ea497-eded-4d8e-84b8-05410ff4114b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.901321] env[61898]: DEBUG nova.compute.provider_tree [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 550.285350] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Releasing lock "refresh_cache-8e5a01e2-67ba-4832-815f-34767deba62f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.285633] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 550.285744] env[61898]: DEBUG nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 550.285968] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 550.329221] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 550.381375] env[61898]: INFO nova.scheduler.client.report [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Deleted allocations for instance ceaef138-8b51-428b-9966-e6db3e7b0eb3 [ 550.405385] env[61898]: DEBUG nova.scheduler.client.report [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 550.835316] env[61898]: DEBUG nova.network.neutron [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.896550] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce8093a6-b798-4348-87dc-017029ddaa0e tempest-VolumesAssistedSnapshotsTest-1930314320 tempest-VolumesAssistedSnapshotsTest-1930314320-project-member] Lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.243s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.898501] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 24.291s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.900158] 
env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: ceaef138-8b51-428b-9966-e6db3e7b0eb3] During sync_power_state the instance has a pending task (networking). Skip. [ 550.900158] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "ceaef138-8b51-428b-9966-e6db3e7b0eb3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.002s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.913471] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.915231] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.824s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.919259] env[61898]: INFO nova.compute.claims [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.948721] env[61898]: INFO nova.scheduler.client.report [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Deleted allocations for instance 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9 [ 551.026443] env[61898]: DEBUG nova.compute.manager [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Received event network-changed-6398b5f8-2603-4da2-b3ce-44653a7617a8 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 551.026872] env[61898]: DEBUG nova.compute.manager [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Refreshing instance network info cache due to event network-changed-6398b5f8-2603-4da2-b3ce-44653a7617a8. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 551.027376] env[61898]: DEBUG oslo_concurrency.lockutils [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] Acquiring lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.027732] env[61898]: DEBUG oslo_concurrency.lockutils [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] Acquired lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 551.028101] env[61898]: DEBUG nova.network.neutron [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Refreshing network info cache for port 6398b5f8-2603-4da2-b3ce-44653a7617a8 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 551.290420] env[61898]: ERROR nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 551.290420] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 551.290420] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.290420] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.290420] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.290420] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.290420] env[61898]: ERROR nova.compute.manager raise self.value [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.290420] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 551.290420] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.290420] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 551.291140] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.291140] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 551.291140] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 551.291140] env[61898]: ERROR nova.compute.manager [ 551.291140] env[61898]: Traceback (most recent call last): [ 551.291140] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 551.291140] env[61898]: listener.cb(fileno) [ 551.291140] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.291140] env[61898]: result = function(*args, **kwargs) [ 551.291140] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 551.291140] env[61898]: return func(*args, **kwargs) [ 551.291140] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 551.291140] env[61898]: raise e [ 551.291140] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 551.291140] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 551.291140] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.291140] env[61898]: created_port_ids = self._update_ports_for_instance( [ 551.291140] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.291140] env[61898]: with excutils.save_and_reraise_exception(): [ 551.291140] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.291140] env[61898]: self.force_reraise() [ 551.291140] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.291140] env[61898]: raise self.value [ 551.291140] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.291140] env[61898]: updated_port = self._update_port( [ 551.291140] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.291140] env[61898]: _ensure_no_port_binding_failure(port) [ 551.291140] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.291140] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 551.292045] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 551.292045] env[61898]: Removing descriptor: 19 [ 551.292045] env[61898]: ERROR nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. 
[ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Traceback (most recent call last): [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] yield resources [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.driver.spawn(context, instance, image_meta, [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 551.292045] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] vm_ref = self.build_virtual_machine(instance, [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] vif_infos = vmwarevif.get_vif_info(self._session, [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] for vif in network_info: [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self._sync_wrapper(fn, *args, **kwargs) [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.wait() [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self[:] = self._gt.wait() [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self._exit_event.wait() [ 551.292392] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 551.292737] env[61898]: ERROR 
nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] result = hub.switch() [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self.greenlet.switch() [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] result = function(*args, **kwargs) [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return func(*args, **kwargs) [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise e [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] nwinfo = self.network_api.allocate_for_instance( [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 551.292737] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] created_port_ids = self._update_ports_for_instance( [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] with excutils.save_and_reraise_exception(): [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.force_reraise() [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise self.value [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] updated_port = self._update_port( [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 551.293122] 
env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] _ensure_no_port_binding_failure(port) [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 551.293122] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise exception.PortBindingFailed(port_id=port['id']) [ 551.293445] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 551.293445] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] [ 551.293445] env[61898]: INFO nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Terminating instance [ 551.339344] env[61898]: INFO nova.compute.manager [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] Took 1.05 seconds to deallocate network for instance. [ 551.400803] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 551.460565] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ef5d734d-8be7-47ba-bf93-56172161540b tempest-ServerDiagnosticsV248Test-327591428 tempest-ServerDiagnosticsV248Test-327591428-project-member] Lock "8a5c9847-fc0d-41f7-87b8-d7ff44073ea9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.149s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.602237] env[61898]: DEBUG nova.network.neutron [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 551.798318] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 551.946598] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.969673] env[61898]: DEBUG nova.network.neutron [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.349715] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aba0e2-1486-4084-9d0a-b3986112ba69 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.368546] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c0c8c5-e384-40f3-97d5-31424cf44e92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.422089] env[61898]: INFO nova.scheduler.client.report [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Deleted allocations for instance 8e5a01e2-67ba-4832-815f-34767deba62f [ 552.430743] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2415ff23-aab1-461b-842d-d493ff65156d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.443865] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724a1d16-9f4d-47bf-9ca3-f8adc119e8ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.463835] env[61898]: DEBUG nova.compute.provider_tree [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.473458] env[61898]: DEBUG oslo_concurrency.lockutils [req-f3765436-9755-466e-9a47-84eda364a13b req-3705b5b5-1ee3-4d8b-91ce-eda91c2c5866 service nova] Releasing lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 552.478020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock 
"refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.478020] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 552.936946] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9321d29a-7c12-4470-9376-2fa5662188d3 tempest-ServerExternalEventsTest-1814976015 tempest-ServerExternalEventsTest-1814976015-project-member] Lock "8e5a01e2-67ba-4832-815f-34767deba62f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.977s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.938733] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8e5a01e2-67ba-4832-815f-34767deba62f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 26.331s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.939426] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64a85916-27fd-481b-9bc1-d7ea52c76c94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.953503] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86328ef-6432-4ff8-a5d7-a0a59fc3ba71 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.970415] env[61898]: DEBUG nova.scheduler.client.report [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 553.258674] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 553.445329] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 553.475366] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.475639] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 553.478396] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.243s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.498380] env[61898]: INFO nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8e5a01e2-67ba-4832-815f-34767deba62f] During the sync_power process the instance has moved from host None to host cpu-1 [ 553.498619] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "8e5a01e2-67ba-4832-815f-34767deba62f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.560s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.732978] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.986572] env[61898]: DEBUG nova.compute.utils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.992932] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.993891] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Not allocating networking since 'none' was specified. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 554.013264] env[61898]: DEBUG nova.compute.manager [req-6716c3b7-a44d-4d8c-9b5d-d94ff800e714 req-35a6c554-83d9-470d-84d6-bdb0d855ccf1 service nova] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Received event network-vif-deleted-6398b5f8-2603-4da2-b3ce-44653a7617a8 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 554.238087] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.238553] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 554.238691] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 554.238989] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5798cbf6-66cd-4033-84dd-79b8bd3b1836 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.257680] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070afe85-a829-4aee-88d4-98a3a01dc026 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.283841] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b521cc8c-e214-467f-8399-55f075b9bba3 could not be found. [ 554.284909] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 554.286066] env[61898]: INFO nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 554.286066] env[61898]: DEBUG oslo.service.loopingcall [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.289938] env[61898]: DEBUG nova.compute.manager [-] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 554.290218] env[61898]: DEBUG nova.network.neutron [-] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 554.349962] env[61898]: DEBUG nova.network.neutron [-] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 554.415208] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb46f09-b3d2-4e78-9e89-4138bbda53d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.423927] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9b0d31-22fa-4231-ba1a-fbd3f8ccde2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.465136] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9eb7a68-4bfa-4b34-a5a3-f8acdd70a379 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.478043] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a567e632-cde8-496e-94e4-652d0d967172 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.492304] env[61898]: DEBUG nova.compute.provider_tree [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 554.494066] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 554.853670] env[61898]: DEBUG nova.network.neutron [-] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.001843] env[61898]: DEBUG nova.scheduler.client.report [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 555.357019] env[61898]: INFO nova.compute.manager [-] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Took 1.07 seconds to deallocate network for instance. [ 555.360381] env[61898]: DEBUG nova.compute.claims [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 555.360381] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.510944] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.032s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.511644] env[61898]: ERROR nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. 
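The PortBindingFailed above is raised by a small guard in nova/network/neutron.py after Neutron returns the updated port; the traceback that follows shows the raise site, _ensure_no_port_binding_failure. The snippet below is a standalone sketch of that check, not the Nova source: the exception class and the sample port dict are stand-ins, but the condition it tests (the port's binding:vif_type coming back as 'binding_failed') is the one the real helper keys on.

```python
# Standalone sketch of the check behind "Binding failed for port ...".
# The real helper is _ensure_no_port_binding_failure in nova/network/neutron.py;
# the exception class below is a stand-in for nova.exception.PortBindingFailed.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port Neutron handed back with a failed binding.
try:
    ensure_no_port_binding_failure(
        {'id': 'd2e48e6a-4b40-4d6e-b811-06ff79636e7a',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR record above
```

When this fires inside _build_and_run_instance, the claim is aborted and the build is re-scheduled, which is the "was re-scheduled: Binding failed for port ..." record a little further down.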
[ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Traceback (most recent call last): [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.driver.spawn(context, instance, image_meta, [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] vm_ref = self.build_virtual_machine(instance, [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] vif_infos = vmwarevif.get_vif_info(self._session, [ 555.511644] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] for vif in network_info: [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self._sync_wrapper(fn, *args, **kwargs) [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.wait() [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self[:] = self._gt.wait() [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self._exit_event.wait() [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] result = hub.switch() [ 555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
555.512469] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return self.greenlet.switch() [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] result = function(*args, **kwargs) [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] return func(*args, **kwargs) [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise e [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] nwinfo = self.network_api.allocate_for_instance( [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] created_port_ids = self._update_ports_for_instance( [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] with excutils.save_and_reraise_exception(): [ 555.512873] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] self.force_reraise() [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise self.value [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] updated_port = self._update_port( [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] _ensure_no_port_binding_failure(port) [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] raise exception.PortBindingFailed(port_id=port['id']) [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] nova.exception.PortBindingFailed: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. [ 555.513298] env[61898]: ERROR nova.compute.manager [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] [ 555.513646] env[61898]: DEBUG nova.compute.utils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 555.514410] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 555.516775] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Build of instance 5d11fc94-b63a-475c-bcb3-f212c838668c was re-scheduled: Binding failed for port d2e48e6a-4b40-4d6e-b811-06ff79636e7a, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 555.518289] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 555.518289] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquiring lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.518289] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Acquired lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.518289] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 555.519398] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.444s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.520827] env[61898]: INFO nova.compute.claims [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.553868] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 555.553868] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 555.553868] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 555.553868] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 555.554182] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 555.554182] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 555.555533] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 555.555533] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 555.555533] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 555.555533] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 555.555533] env[61898]: DEBUG nova.virt.hardware [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 555.557769] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1827fa7-f216-413a-8bf7-2ddcc2a07215 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.567268] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cac18da-2040-45f8-9d50-1aa93bc32266 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.585634] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 555.592733] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Creating folder: Project (783446afdd4747a6bd82c1d33dded9b7). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.593706] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65a89d16-8b78-488b-992a-0fc7851dcc79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.604578] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Created folder: Project (783446afdd4747a6bd82c1d33dded9b7) in parent group-v267550. [ 555.604578] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Creating folder: Instances. Parent ref: group-v267554. 
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 555.604794] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f6b928d-e033-4833-9bfe-d96ea7ad7df9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.616028] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Created folder: Instances in parent group-v267554. [ 555.616028] env[61898]: DEBUG oslo.service.loopingcall [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.616218] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 555.616371] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb4fb452-b5ca-47ae-a900-116580075e95 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.635540] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 555.635540] env[61898]: value = "task-1240338" [ 555.635540] env[61898]: _type = "Task" [ 555.635540] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.643435] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240338, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.147860] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240338, 'name': CreateVM_Task, 'duration_secs': 0.271971} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.148154] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 556.149746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.149746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.149746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 556.149746] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cb8d1fd-f405-4b91-883c-afb117c5a9cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.157835] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 556.157835] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5255477c-fe94-b610-ae4b-28795212cfce" [ 556.157835] env[61898]: _type = "Task" [ 556.157835] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.165906] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5255477c-fe94-b610-ae4b-28795212cfce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.286057] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 556.313297] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "082fe687-5038-4c31-9b27-f8a5c548cdc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.314178] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "082fe687-5038-4c31-9b27-f8a5c548cdc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.676860] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5255477c-fe94-b610-ae4b-28795212cfce, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.679151] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.679969] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 556.680219] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.680365] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.680773] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 556.681412] env[61898]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a67e117-eea5-4cd1-91bc-f99a91a09307 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.692061] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 556.694443] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 556.695847] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2a9de20-a45e-495e-8ef0-80b698e131de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.699649] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.704510] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 556.704510] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f4a725-3438-b185-c925-72706929b5c1" [ 556.704510] env[61898]: _type = "Task" [ 556.704510] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.713678] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f4a725-3438-b185-c925-72706929b5c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.884541] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bc2356-d765-42e7-afe6-5cd3a5cd3b01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.897852] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75db3c4-e18d-4cd5-b273-fedda4a42931 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.935206] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5de4812-1887-4acb-a88f-4d0b9af10e78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.942276] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f64b8eb-6cd9-4349-88bc-5261af548588 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.957008] env[61898]: DEBUG nova.compute.provider_tree [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 557.208569] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Releasing lock "refresh_cache-5d11fc94-b63a-475c-bcb3-f212c838668c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.208569] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 557.208569] env[61898]: DEBUG nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 557.208569] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 557.224485] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f4a725-3438-b185-c925-72706929b5c1, 'name': SearchDatastore_Task, 'duration_secs': 0.012769} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.225739] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-313e5f21-917a-4400-a4bb-8b472863111e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.232595] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 557.232595] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5298bf69-0d76-df34-e1aa-9361e8297dee" [ 557.232595] env[61898]: _type = "Task" [ 557.232595] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.245171] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5298bf69-0d76-df34-e1aa-9361e8297dee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.365467] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 557.461539] env[61898]: DEBUG nova.scheduler.client.report [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 557.747410] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5298bf69-0d76-df34-e1aa-9361e8297dee, 'name': SearchDatastore_Task, 'duration_secs': 0.014799} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.747692] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.747736] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6/41ac9f9b-5cd3-4302-86ac-8ef7cae603b6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 557.748533] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4688d080-f311-434e-bc80-bf676d6c3fb8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.756650] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 557.756650] env[61898]: value = "task-1240339" [ 557.756650] env[61898]: _type = "Task" [ 557.756650] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.767333] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.868169] env[61898]: DEBUG nova.network.neutron [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.967710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.968315] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 557.971094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.948s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.973159] env[61898]: INFO nova.compute.claims [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 558.274515] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464597} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.274770] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6/41ac9f9b-5cd3-4302-86ac-8ef7cae603b6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 558.276596] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 558.276596] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ebc365fd-bd8e-470d-afab-36247a42048e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.282129] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 558.282129] env[61898]: value = "task-1240340" [ 558.282129] env[61898]: _type = "Task" [ 558.282129] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.291779] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.370810] env[61898]: INFO nova.compute.manager [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] [instance: 5d11fc94-b63a-475c-bcb3-f212c838668c] Took 1.16 seconds to deallocate network for instance. 
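Earlier in this stretch the spawn of 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 copies the cached image VMDK into the instance directory and then logs "Extending root virtual disk to 1048576": the extend task takes its target capacity in KB, and 1048576 is the m1.nano flavor's root_gb=1 expressed in KiB. A minimal sketch of that conversion, with a helper name chosen here purely for illustration (the real logic lives in nova.virt.vmwareapi.vmops):

```python
# Illustration of the "Extending root virtual disk to 1048576" size:
# the extend task takes its target capacity in KB, so a flavor's root_gb
# is multiplied out to KiB. Helper name chosen here for illustration only.

def root_disk_size_kb(root_gb: int) -> int:
    """Convert a flavor root_gb value into the KiB capacity passed to the extend task."""
    return root_gb * 1024 * 1024


assert root_disk_size_kb(1) == 1048576    # m1.nano, root_gb=1, as logged above
assert root_disk_size_kb(20) == 20971520  # a hypothetical 20 GB root disk
```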
[ 558.478889] env[61898]: DEBUG nova.compute.utils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 558.483460] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 558.483614] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 558.792936] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064288} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.793245] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 558.794073] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95872e68-bfc0-41fa-9a1c-0231b0645c5e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.815261] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6/41ac9f9b-5cd3-4302-86ac-8ef7cae603b6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 558.815571] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d64e7ae5-c05a-4583-915e-dc808b4af5ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.837563] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 558.837563] env[61898]: value = "task-1240341" [ 558.837563] env[61898]: _type = "Task" [ 558.837563] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.848271] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240341, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.896617] env[61898]: DEBUG nova.policy [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49ea50e4919245feaab941d68003001f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8c19e89e50e4357a12551a5821fdc90', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 558.992270] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 559.104915] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "c6e77597-5a5d-4b86-8588-7056828025cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.104915] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "c6e77597-5a5d-4b86-8588-7056828025cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.348278] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240341, 'name': ReconfigVM_Task, 'duration_secs': 0.302424} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.348906] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6/41ac9f9b-5cd3-4302-86ac-8ef7cae603b6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 559.349332] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0a02d1b-abe1-4006-8fff-7b91b2571299 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.353431] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5d60c0-9c6b-4a75-86ea-7d83f9d3423b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.358011] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 559.358011] env[61898]: value = "task-1240342" [ 559.358011] env[61898]: _type = "Task" [ 559.358011] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.366561] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7171e1-a936-418c-8296-aa20f09883f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.377714] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240342, 'name': Rename_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.414212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfe9fc4-c3f8-46b3-9a93-3bcce6f7265f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.424478] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e84294f-4731-47dc-8c9f-44bbe87c9636 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.440709] env[61898]: DEBUG nova.compute.provider_tree [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.448085] env[61898]: INFO nova.scheduler.client.report [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Deleted allocations for instance 5d11fc94-b63a-475c-bcb3-f212c838668c [ 559.875262] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240342, 'name': Rename_Task, 'duration_secs': 0.140398} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.875262] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 559.875262] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa43fe9e-8a49-4475-bda2-cd9b25aa8e18 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.885052] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 559.885052] env[61898]: value = "task-1240343" [ 559.885052] env[61898]: _type = "Task" [ 559.885052] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.893663] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240343, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.943685] env[61898]: DEBUG nova.scheduler.client.report [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 559.952217] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e85cae83-ceaf-49fa-a8dc-3ebb7b3e842d tempest-ServerDiagnosticsTest-1623324106 tempest-ServerDiagnosticsTest-1623324106-project-member] Lock "5d11fc94-b63a-475c-bcb3-f212c838668c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.907s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.007424] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 560.034465] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 560.034717] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 560.034886] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.035093] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 
tempest-TenantUsagesTestJSON-419619465-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 560.035347] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.035504] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 560.035716] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 560.036098] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 560.036297] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 560.036491] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 560.036671] env[61898]: DEBUG nova.virt.hardware [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 560.037867] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c260e3-247d-4a55-b416-59844403be40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.046457] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d61740-14d4-4cf8-9af7-3794364d7792 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.395368] env[61898]: DEBUG oslo_vmware.api [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240343, 'name': PowerOnVM_Task, 'duration_secs': 0.436356} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.395634] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 560.396365] env[61898]: INFO nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Took 4.88 seconds to spawn the instance on the hypervisor. [ 560.396615] env[61898]: DEBUG nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 560.397405] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd981a96-53f6-4c2e-9029-197e9097c737 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.454050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.454050] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 560.457086] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.938s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.460560] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 560.923937] env[61898]: INFO nova.compute.manager [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Took 23.85 seconds to build instance. 
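The Rename_Task and PowerOnVM_Task entries above follow the standard submit-then-poll pattern: the driver invokes the vCenter task, then repeatedly fetches the task state and progress (each fetch shows up as a _poll_task entry) until the task reaches a terminal state or a timeout expires. The sketch below is a minimal, illustrative version of that loop, not the oslo.vmware implementation; fetch_task_info is a hypothetical callable standing in for the PropertyCollector round-trips logged above.

import time

def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=120.0):
    """Poll a vCenter-style task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # One iteration corresponds to one "_poll_task" line in the log.
        info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 10}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")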
[ 560.932516] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "74a2e109-244c-4349-a0b7-0db9e9d4868e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.933557] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "74a2e109-244c-4349-a0b7-0db9e9d4868e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.958243] env[61898]: DEBUG nova.compute.utils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 560.961131] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 560.961131] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.017847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.237072] env[61898]: DEBUG nova.policy [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '917b839587b14923ba50b45b17e1359a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86247d53c88644ecbede5d23d7b0eb70', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 561.250415] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Successfully created port: 4e4c93c0-c1e9-4284-b5cf-d5292244187a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
561.428823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aaf623ec-34af-41b9-b711-69ef1ab21568 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.777s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.480129] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 561.504046] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4b39d9ad-b7d3-4464-b9e0-799440b445e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 561.506478] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance b521cc8c-e214-467f-8399-55f075b9bba3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 561.506478] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 561.506478] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 561.506478] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance c1c15498-af88-4fcf-9a58-7060502bcaf2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 561.569937] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "2cfdb95a-8c00-4528-a4bc-55f4ced67a89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 561.570171] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "2cfdb95a-8c00-4528-a4bc-55f4ced67a89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.934633] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 562.007315] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 0dfabd80-a385-4124-af33-083559819d7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.080050] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Successfully created port: 96f6921e-36a7-4acc-929b-d04b099a7893 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.463173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.496683] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 562.517142] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance cf94c3f2-a4db-479f-8251-f2e403697678 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.531051] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquiring lock "6fc82922-9142-475b-99a6-bbc5ee43b30b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.531051] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "6fc82922-9142-475b-99a6-bbc5ee43b30b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.536125] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 562.536485] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 562.536756] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 562.537064] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 562.537343] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 562.537600] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 562.537918] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 562.538258] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 562.539042] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 562.541020] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 562.541020] env[61898]: DEBUG nova.virt.hardware [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 562.541020] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c94641-b01e-4cd1-94c2-bc390707c787 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.549238] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f44170c-087c-4908-a741-2fce54964b6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.029233] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 31aa8536-1597-4b61-b069-80daf5306dd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 563.533081] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7ef91986-fb46-478b-85a5-05d597790ad9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 563.768674] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquiring lock "31239011-3cd9-4fea-a99d-26d09884497b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.768674] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "31239011-3cd9-4fea-a99d-26d09884497b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.925382] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquiring lock "8ab18b24-91d4-4718-8f1a-d82f4226ba2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.925382] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "8ab18b24-91d4-4718-8f1a-d82f4226ba2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.038964] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 564.073326] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.073552] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.100639] env[61898]: ERROR nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. [ 564.100639] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.100639] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.100639] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.100639] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.100639] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.100639] env[61898]: ERROR nova.compute.manager raise self.value [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.100639] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 564.100639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.100639] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 564.101208] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.101208] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 564.101208] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. [ 564.101208] env[61898]: ERROR nova.compute.manager [ 564.101208] env[61898]: Traceback (most recent call last): [ 564.101208] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 564.101208] env[61898]: listener.cb(fileno) [ 564.101208] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.101208] env[61898]: result = function(*args, **kwargs) [ 564.101208] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.101208] env[61898]: return func(*args, **kwargs) [ 564.101208] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 564.101208] env[61898]: raise e [ 564.101208] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.101208] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 564.101208] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.101208] env[61898]: created_port_ids = self._update_ports_for_instance( [ 564.101208] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.101208] env[61898]: with excutils.save_and_reraise_exception(): [ 564.101208] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.101208] env[61898]: self.force_reraise() [ 564.101208] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.101208] env[61898]: raise self.value [ 564.101208] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.101208] env[61898]: updated_port = self._update_port( [ 564.101208] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.101208] env[61898]: _ensure_no_port_binding_failure(port) [ 564.101208] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.101208] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 564.102341] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. [ 564.102341] env[61898]: Removing descriptor: 19 [ 564.102518] env[61898]: ERROR nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. 
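The PortBindingFailed error above is raised after Neutron reports that it could not bind port 4e4c93c0-c1e9-4284-b5cf-d5292244187a: the port comes back from the create/update call with a failed binding, and the spawn is aborted. As a rough, standalone illustration of that check (not the Nova source), the port's binding:vif_type can be inspected after the Neutron call and an exception raised when it is 'binding_failed'; the PortBindingFailed class below is a local stand-in for nova.exception.PortBindingFailed.

class PortBindingFailed(Exception):
    # Local stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron marks a port whose binding could not be completed with the
    # special vif_type 'binding_failed'; treat that as a hard failure.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])

# Example with a port dict shaped like a Neutron API response.
port = {'id': '4e4c93c0-c1e9-4284-b5cf-d5292244187a',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)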
[ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Traceback (most recent call last): [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] yield resources [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.driver.spawn(context, instance, image_meta, [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] vm_ref = self.build_virtual_machine(instance, [ 564.102518] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] for vif in network_info: [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self._sync_wrapper(fn, *args, **kwargs) [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.wait() [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self[:] = self._gt.wait() [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self._exit_event.wait() [ 564.103162] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.103162] env[61898]: ERROR 
nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] result = hub.switch() [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self.greenlet.switch() [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] result = function(*args, **kwargs) [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return func(*args, **kwargs) [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise e [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] nwinfo = self.network_api.allocate_for_instance( [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] created_port_ids = self._update_ports_for_instance( [ 564.103888] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] with excutils.save_and_reraise_exception(): [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.force_reraise() [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise self.value [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] updated_port = self._update_port( [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.104731] 
env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] _ensure_no_port_binding_failure(port) [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise exception.PortBindingFailed(port_id=port['id']) [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. [ 564.104731] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] [ 564.107091] env[61898]: INFO nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Terminating instance [ 564.549547] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance d74776d1-f374-4761-976c-f073b3821f42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 564.608955] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquiring lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.609134] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquired lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.610485] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.617810] env[61898]: DEBUG nova.compute.manager [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Received event network-changed-4e4c93c0-c1e9-4284-b5cf-d5292244187a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 564.618145] env[61898]: DEBUG nova.compute.manager [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Refreshing instance network info cache due to event network-changed-4e4c93c0-c1e9-4284-b5cf-d5292244187a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 564.618197] env[61898]: DEBUG oslo_concurrency.lockutils [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] Acquiring lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.670700] env[61898]: DEBUG nova.compute.manager [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Received event network-changed-96f6921e-36a7-4acc-929b-d04b099a7893 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 564.670920] env[61898]: DEBUG nova.compute.manager [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Refreshing instance network info cache due to event network-changed-96f6921e-36a7-4acc-929b-d04b099a7893. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 564.671156] env[61898]: DEBUG oslo_concurrency.lockutils [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] Acquiring lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 564.674102] env[61898]: DEBUG oslo_concurrency.lockutils [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] Acquired lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.674102] env[61898]: DEBUG nova.network.neutron [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Refreshing network info cache for port 96f6921e-36a7-4acc-929b-d04b099a7893 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 564.684894] env[61898]: ERROR nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. 
[ 564.684894] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.684894] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.684894] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.684894] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.684894] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.684894] env[61898]: ERROR nova.compute.manager raise self.value [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.684894] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 564.684894] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.684894] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 564.685953] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.685953] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 564.685953] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. 
[ 564.685953] env[61898]: ERROR nova.compute.manager [ 564.685953] env[61898]: Traceback (most recent call last): [ 564.685953] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 564.685953] env[61898]: listener.cb(fileno) [ 564.685953] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.685953] env[61898]: result = function(*args, **kwargs) [ 564.685953] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.685953] env[61898]: return func(*args, **kwargs) [ 564.685953] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 564.685953] env[61898]: raise e [ 564.685953] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.685953] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 564.685953] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.685953] env[61898]: created_port_ids = self._update_ports_for_instance( [ 564.685953] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.685953] env[61898]: with excutils.save_and_reraise_exception(): [ 564.685953] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.685953] env[61898]: self.force_reraise() [ 564.685953] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.685953] env[61898]: raise self.value [ 564.685953] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.685953] env[61898]: updated_port = self._update_port( [ 564.685953] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.685953] env[61898]: _ensure_no_port_binding_failure(port) [ 564.685953] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.685953] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 564.687495] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. [ 564.687495] env[61898]: Removing descriptor: 20 [ 564.687495] env[61898]: ERROR nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. 
[ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Traceback (most recent call last): [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] yield resources [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.driver.spawn(context, instance, image_meta, [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self._vmops.spawn(context, instance, image_meta, injected_files, [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 564.687495] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] vm_ref = self.build_virtual_machine(instance, [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] vif_infos = vmwarevif.get_vif_info(self._session, [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] for vif in network_info: [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self._sync_wrapper(fn, *args, **kwargs) [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.wait() [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self[:] = self._gt.wait() [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self._exit_event.wait() [ 564.687913] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 564.688317] env[61898]: ERROR 
nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] result = hub.switch() [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self.greenlet.switch() [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] result = function(*args, **kwargs) [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return func(*args, **kwargs) [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise e [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] nwinfo = self.network_api.allocate_for_instance( [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 564.688317] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] created_port_ids = self._update_ports_for_instance( [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] with excutils.save_and_reraise_exception(): [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.force_reraise() [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise self.value [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] updated_port = self._update_port( [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 564.688753] 
env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] _ensure_no_port_binding_failure(port) [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 564.688753] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise exception.PortBindingFailed(port_id=port['id']) [ 564.689323] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. [ 564.689323] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] [ 564.689323] env[61898]: INFO nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Terminating instance [ 565.053696] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance f2f968db-d4e2-451d-afe6-330196eba6c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 565.146790] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.194623] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquiring lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 565.204383] env[61898]: DEBUG nova.network.neutron [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.267039] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.523195] env[61898]: DEBUG nova.network.neutron [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.557695] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 69ad75e8-dcfc-499a-8f18-bf38575968be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 565.771500] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Releasing lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 565.771500] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 565.771500] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 565.771500] env[61898]: DEBUG oslo_concurrency.lockutils [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] Acquired lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 565.771500] env[61898]: DEBUG nova.network.neutron [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Refreshing network info cache for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 565.771717] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-896cc0bd-2273-4052-bb98-268ac4c03d6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.785868] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afee4bf2-6b99-4e15-a3ac-2e1956284bbc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.807448] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1c15498-af88-4fcf-9a58-7060502bcaf2 could not be found. [ 565.808411] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 565.808411] env[61898]: INFO nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 565.808411] env[61898]: DEBUG oslo.service.loopingcall [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 565.808411] env[61898]: DEBUG nova.compute.manager [-] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 565.808411] env[61898]: DEBUG nova.network.neutron [-] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 565.841569] env[61898]: DEBUG nova.network.neutron [-] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.025942] env[61898]: DEBUG oslo_concurrency.lockutils [req-0fed4498-5290-4079-bef3-9c4c6a73cf82 req-76e6fd5c-d4fc-4897-8430-5cd76a02e2b8 service nova] Releasing lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 566.027686] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquired lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.027686] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 566.061932] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 759d1958-0518-4654-8686-38be0920c29f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 566.309255] env[61898]: DEBUG nova.network.neutron [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.345098] env[61898]: DEBUG nova.network.neutron [-] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.463050] env[61898]: DEBUG nova.network.neutron [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.567820] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance cfb2f64b-7026-444d-8f86-500445343ac1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 566.571289] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 566.796266] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 566.850174] env[61898]: INFO nova.compute.manager [-] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Took 1.04 seconds to deallocate network for instance. 
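The surrounding records trace the usual cleanup after a failed spawn of instance c1c15498-af88-4fcf-9a58-7060502bcaf2: destroy on the hypervisor (a no-op here, since the VM never existed on the backend), deallocate the network, then abort the resource claim so the tracker releases the DISK_GB/MEMORY_MB/VCPU it had claimed. Because the "Binding failed for port <uuid>" message recurs throughout this log and points at the Neutron side, a small throwaway helper like the one below (hypothetical, not part of Nova or this deployment) can collect the affected port IDs from a nova-compute log for cross-checking against the neutron-server log.

    # Illustrative helper (hypothetical): extract the port UUIDs from
    # "Binding failed for port <uuid>" messages in a nova-compute log so
    # they can be looked up in the neutron logs, as the error text suggests.
    import re
    import sys
    from collections import Counter

    PORT_RE = re.compile(
        r"Binding failed for port "
        r"([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"
    )

    def failed_ports(log_path):
        counts = Counter()
        with open(log_path, errors="replace") as handle:
            for line in handle:
                counts.update(PORT_RE.findall(line))
        return counts

    if __name__ == "__main__":
        # Usage: python failed_ports.py /path/to/nova-compute.log
        for port_id, hits in failed_ports(sys.argv[1]).most_common():
            print(port_id, hits)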
[ 566.852970] env[61898]: DEBUG nova.compute.claims [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 566.853074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 566.966273] env[61898]: DEBUG oslo_concurrency.lockutils [req-fa60bdeb-e236-4578-973c-0969b3282b0a req-8b7ced78-810b-448e-bb04-ce8e53382d4a service nova] Releasing lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.034156] env[61898]: DEBUG nova.compute.manager [req-499b5a15-3849-4945-8f5e-a2d577811b70 req-c240140a-d613-4edb-87c1-0d2fdf050d0e service nova] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Received event network-vif-deleted-4e4c93c0-c1e9-4284-b5cf-d5292244187a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 567.071234] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance f968f3df-c70b-466b-8aaa-879354f12d3b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.299388] env[61898]: DEBUG nova.compute.manager [req-ce3a0dee-fa41-46c1-a608-32fdc88f2e87 req-6088ae8f-870c-4381-b0bf-0690e727d629 service nova] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Received event network-vif-deleted-96f6921e-36a7-4acc-929b-d04b099a7893 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 567.299388] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Releasing lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.300266] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 567.300568] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 567.301245] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20029335-c3ec-4808-96a8-60467fa0384d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.312248] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a1d7bf-ae1a-47a3-8bd2-75db807e82d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.335185] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113 could not be found. [ 567.335424] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 567.335607] env[61898]: INFO nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Took 0.04 seconds to destroy the instance on the hypervisor. [ 567.335840] env[61898]: DEBUG oslo.service.loopingcall [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 567.336066] env[61898]: DEBUG nova.compute.manager [-] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 567.336163] env[61898]: DEBUG nova.network.neutron [-] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 567.370544] env[61898]: DEBUG nova.network.neutron [-] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 567.552899] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "1f70b6e1-b534-40a1-b262-e0a5ce3e425e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.553156] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "1f70b6e1-b534-40a1-b262-e0a5ce3e425e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 567.577097] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 567.873383] env[61898]: DEBUG nova.network.neutron [-] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.080806] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1f7b6f74-24c1-4db1-9f70-350f307a07b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 568.377222] env[61898]: INFO nova.compute.manager [-] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Took 1.04 seconds to deallocate network for instance. [ 568.381127] env[61898]: DEBUG nova.compute.claims [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 568.381331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.586476] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 23a0d825-3132-44d5-8b73-a06a0c0e7b1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 569.089349] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 082fe687-5038-4c31-9b27-f8a5c548cdc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 569.598155] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance c6e77597-5a5d-4b86-8588-7056828025cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 569.889124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquiring lock "72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.889345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.104056] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 74a2e109-244c-4349-a0b7-0db9e9d4868e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.610253] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 2cfdb95a-8c00-4528-a4bc-55f4ced67a89 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 570.610253] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 570.610253] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 571.034034] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7d826e-1498-4105-b468-4e45cd3aa3ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.041558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec903cd-26f2-4a33-8603-ca17c2518f99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.077189] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6335f446-4d7e-4aba-9846-9627c38e7979 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.085937] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec54baa-6119-4297-b9c1-d3bae8418806 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.102381] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.477825] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "4a6adf12-7106-46ce-abb0-fe8c5c212905" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.478073] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "4a6adf12-7106-46ce-abb0-fe8c5c212905" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.605957] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 571.801259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "8ac73bda-db02-4427-9730-003561d078ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.801259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "8ac73bda-db02-4427-9730-003561d078ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.104454] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquiring lock "10e3f3dd-165b-4049-8c1f-f561c91717c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.104703] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "10e3f3dd-165b-4049-8c1f-f561c91717c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.119016] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 572.119016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.660s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.119016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.172s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 572.120939] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 572.121356] env[61898]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Getting list of instances from cluster (obj){ [ 572.121356] env[61898]: value = "domain-c8" [ 572.121356] env[61898]: _type = "ClusterComputeResource" [ 572.121356] env[61898]: } {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 572.122579] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7044e155-a643-43c3-9031-9e7e0862c3c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.134012] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Got total of 1 instances {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 573.111268] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00870ba-31ae-42fc-b480-f661cd4eea89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.119200] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616f04fb-b84d-443f-a624-70525f64602c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.154492] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adb69ed-a11c-4af7-84e9-ff9c77bc768a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.162717] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f15d4c-1b99-4613-9f66-27df074a4378 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.178414] env[61898]: DEBUG nova.compute.provider_tree [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 573.681835] env[61898]: DEBUG nova.scheduler.client.report [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 574.189394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.070s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.189394] env[61898]: ERROR 
nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Traceback (most recent call last): [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.driver.spawn(context, instance, image_meta, [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 574.189394] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] vm_ref = self.build_virtual_machine(instance, [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] for vif in network_info: [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self._sync_wrapper(fn, *args, **kwargs) [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.wait() [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self[:] = self._gt.wait() [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self._exit_event.wait() [ 574.190478] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] result = hub.switch() [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return self.greenlet.switch() [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] result = function(*args, **kwargs) [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] return func(*args, **kwargs) [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise e [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] nwinfo = self.network_api.allocate_for_instance( [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 574.190957] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] created_port_ids = self._update_ports_for_instance( [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] with excutils.save_and_reraise_exception(): [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] self.force_reraise() [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise self.value [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] updated_port = self._update_port( [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 
4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] _ensure_no_port_binding_failure(port) [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 574.191419] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] raise exception.PortBindingFailed(port_id=port['id']) [ 574.191814] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] nova.exception.PortBindingFailed: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. [ 574.191814] env[61898]: ERROR nova.compute.manager [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] [ 574.191814] env[61898]: DEBUG nova.compute.utils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 574.193101] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.893s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.194925] env[61898]: INFO nova.compute.claims [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 574.198018] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Build of instance 4b39d9ad-b7d3-4464-b9e0-799440b445e4 was re-scheduled: Binding failed for port 6bc5168d-bdb4-4084-89c9-820c06cbff4f, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 574.198501] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 574.198688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquiring lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.198866] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Acquired lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.199042] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 574.727930] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.879772] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.382050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Releasing lock "refresh_cache-4b39d9ad-b7d3-4464-b9e0-799440b445e4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 575.382349] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 575.382503] env[61898]: DEBUG nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 575.382724] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 575.407763] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 575.658294] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89f837a-407d-470b-b40b-9180cc766d3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.665835] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae27a2b-fc5e-4325-938c-aca313dd9d9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.701951] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d37899e-0f4e-423d-8e0d-8e8ed8a91d0d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.710353] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d9c350-6d41-427d-8198-b9fb637d1959 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.724192] env[61898]: DEBUG nova.compute.provider_tree [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.910800] env[61898]: DEBUG nova.network.neutron [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.232065] env[61898]: DEBUG nova.scheduler.client.report [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 576.417437] env[61898]: INFO nova.compute.manager [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] [instance: 4b39d9ad-b7d3-4464-b9e0-799440b445e4] Took 1.03 seconds to deallocate network for instance. [ 576.738409] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.738409] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 576.739658] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.522s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.741082] env[61898]: INFO nova.compute.claims [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.247774] env[61898]: DEBUG nova.compute.utils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.250649] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 577.473652] env[61898]: INFO nova.scheduler.client.report [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Deleted allocations for instance 4b39d9ad-b7d3-4464-b9e0-799440b445e4 [ 577.751967] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 577.985282] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2406f5e0-3978-4105-9213-92aa3720b4f2 tempest-ImagesOneServerTestJSON-2088851947 tempest-ImagesOneServerTestJSON-2088851947-project-member] Lock "4b39d9ad-b7d3-4464-b9e0-799440b445e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.735s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.173651] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e98b16-03ac-4e6e-834b-e33f817a05da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.182648] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131cd8d1-e1b5-45ef-a499-c8c5d2322644 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.212978] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3111104-bc50-4bd6-a247-b6c41182c3f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.220447] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b217fd93-7f26-45da-9a3a-7aff61bbc133 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.233662] env[61898]: DEBUG nova.compute.provider_tree [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.488919] env[61898]: DEBUG nova.compute.manager [None req-16a04361-20bc-4a4e-8787-95c7d2ce0017 tempest-ServersListShow296Test-1560153890 tempest-ServersListShow296Test-1560153890-project-member] [instance: 4357edb7-fac0-4ad2-9746-4a27de976fdd] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 578.737581] env[61898]: DEBUG nova.scheduler.client.report [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 578.765609] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 578.796553] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.796829] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.797013] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.797242] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.797395] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.797539] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.797796] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.797969] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.799129] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 
tempest-ServerShowV247Test-397224089-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.799358] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.800030] env[61898]: DEBUG nova.virt.hardware [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.800538] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66515f65-f5a6-4ec2-abfc-b91ff6bcbd10 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.810088] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1964e79-68d0-4adf-83c9-82113691752c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.824093] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.830062] env[61898]: DEBUG oslo.service.loopingcall [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.830121] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.830311] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d21ad856-57ec-4a38-9e50-8532f73df97b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.847133] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.847133] env[61898]: value = "task-1240348" [ 578.847133] env[61898]: _type = "Task" [ 578.847133] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.854612] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240348, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.994008] env[61898]: DEBUG nova.compute.manager [None req-16a04361-20bc-4a4e-8787-95c7d2ce0017 tempest-ServersListShow296Test-1560153890 tempest-ServersListShow296Test-1560153890-project-member] [instance: 4357edb7-fac0-4ad2-9746-4a27de976fdd] Instance disappeared before build. 
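The nova.virt.hardware records above walk from the flavor/image limits (65536:65536:65536) down to a single possible topology for the 1-vCPU m1.nano flavor. A simplified enumeration, not Nova's exact implementation, shows why only 1:1:1 survives:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Any factor of a product equal to `vcpus` cannot exceed `vcpus`,
        # so the search space can be capped at min(limit, vcpus).
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # m1.nano: 1 vCPU under 65536:65536:65536 limits -> only 1:1:1 remains.
    print(possible_topologies(1, 65536, 65536, 65536))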
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2445}} [ 579.244019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.244019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.297s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.244907] env[61898]: INFO nova.compute.claims [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.357596] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240348, 'name': CreateVM_Task, 'duration_secs': 0.396885} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.357779] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 579.358218] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.358378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.358725] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 579.358972] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6209d3b-cb16-464d-88f9-96e9f97f4b8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.363543] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 579.363543] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52609df3-79bd-6284-13a7-f6602cd63785" [ 
579.363543] env[61898]: _type = "Task" [ 579.363543] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.371376] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52609df3-79bd-6284-13a7-f6602cd63785, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.510881] env[61898]: DEBUG oslo_concurrency.lockutils [None req-16a04361-20bc-4a4e-8787-95c7d2ce0017 tempest-ServersListShow296Test-1560153890 tempest-ServersListShow296Test-1560153890-project-member] Lock "4357edb7-fac0-4ad2-9746-4a27de976fdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.626s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.753620] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "26867719-20c9-44a0-a4bd-f7d1845610a2" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.753620] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "26867719-20c9-44a0-a4bd-f7d1845610a2" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.874636] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52609df3-79bd-6284-13a7-f6602cd63785, 'name': SearchDatastore_Task, 'duration_secs': 0.031924} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.874953] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 579.875203] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 579.875432] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 579.875572] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.875745] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 579.876373] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11a88015-fc54-4d32-aa8d-2bd1c21be61c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.883315] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 579.883491] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Folder [datastore2] devstack-image-cache_base created. 
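The "Acquiring lock" / "Acquired lock" / "Releasing lock" records around the devstack-image-cache_base path, and the 'Lock "compute_resources" acquired by ... :: waited Ns' records earlier, come from oslo.concurrency. A minimal sketch of the two forms visible in this trace (the functions here are placeholders, not Nova's code):

    from oslo_concurrency import lockutils

    def refresh_image_cache(datastore, image_id):
        # Context-manager form: entering and leaving the block emits the
        # "Acquiring/Acquired/Releasing lock" DEBUG records seen above.
        lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
        with lockutils.lock(lock_name):
            pass  # search the datastore / copy the VMDK while holding the lock

    # Decorator form: the wrapper logs how long the caller waited and how
    # long the lock was held, as with "compute_resources" in this trace.
    @lockutils.synchronized("compute_resources")
    def claim_resources():
        pass

    refresh_image_cache("datastore2", "e07a6c11-ab12-4187-81fc-1a28a9d1e65d")
    claim_resources()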
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 579.884219] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9255d10d-cd1f-463b-9aec-cbce3cc488e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.889417] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 579.889417] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cf66dc-232d-af76-93c0-ec1a9225c483" [ 579.889417] env[61898]: _type = "Task" [ 579.889417] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.897210] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cf66dc-232d-af76-93c0-ec1a9225c483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.013470] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 580.260781] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "26867719-20c9-44a0-a4bd-f7d1845610a2" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.507s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.261486] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 580.405196] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cf66dc-232d-af76-93c0-ec1a9225c483, 'name': SearchDatastore_Task, 'duration_secs': 0.008177} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.405986] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07f98a48-535b-4e25-90ba-750ff17995e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.417872] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 580.417872] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52de47a9-0e39-ee6e-fd1f-325b02dcb764" [ 580.417872] env[61898]: _type = "Task" [ 580.417872] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.429490] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52de47a9-0e39-ee6e-fd1f-325b02dcb764, 'name': SearchDatastore_Task, 'duration_secs': 0.008155} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.433095] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 580.433541] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 580.434079] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddc1e4d4-66df-4adc-9501-2d91a931e2c3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.442896] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 580.442896] env[61898]: value = "task-1240349" [ 580.442896] env[61898]: _type = "Task" [ 580.442896] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.457042] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240349, 'name': CopyVirtualDisk_Task} progress is 0%. 
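The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task records follow oslo.vmware's invoke-then-poll pattern: a *_Task method returns a task reference, and wait_for_task() polls it (the "progress is N%" lines) until it finishes. A rough sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession and placeholder managed-object references for the folder, resource pool and config spec:

    def create_vm(session, vm_folder, resource_pool, config_spec):
        # Folder.CreateVM_Task returns a Task managed-object reference.
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=resource_pool)
        # wait_for_task() polls the task until it succeeds (raising on
        # error) and returns the final TaskInfo; .result is the new VM.
        task_info = session.wait_for_task(task)
        return task_info.result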
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.541314] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.741699] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f17c00-b813-4b1e-863a-b34d4f0f7631 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.752632] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8450e84e-59a6-412d-9458-b6af5bcc3974 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.785966] env[61898]: DEBUG nova.compute.utils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 580.788195] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 580.788386] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 580.790877] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce29216-32f9-43a1-af78-5ba8f78a8ea7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.801248] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f475ec7-7a0f-4eea-a774-47f13b6ce9d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.823403] env[61898]: DEBUG nova.compute.provider_tree [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.890268] env[61898]: DEBUG nova.policy [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '376c067d85dd4459927098e43b53a695', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd8c243d6354e499b97db8ff9a8d27da7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': 
None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.955797] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240349, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.289714] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 581.326841] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Successfully created port: c6001d62-fb77-48d7-8563-7f6acad85cb8 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.331014] env[61898]: DEBUG nova.scheduler.client.report [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 581.458780] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240349, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530983} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.458780] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 581.458780] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 581.458780] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24f94e78-23a6-432f-8eab-8e67c3db638b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.470029] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 581.470029] env[61898]: value = "task-1240351" [ 581.470029] env[61898]: _type = "Task" [ 581.470029] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.478539] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240351, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.839345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.839853] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 581.842606] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.850s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.844510] env[61898]: INFO nova.compute.claims [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 581.976919] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240351, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061927} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.979678] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 581.983624] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293a55d7-5760-412b-9af5-7476b25dcd94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.009226] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 582.009625] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc42a18f-9c36-4481-8de7-ee1cf1109739 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.031760] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 582.031760] env[61898]: value = "task-1240352" [ 582.031760] env[61898]: _type = "Task" [ 582.031760] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.041856] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240352, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.302108] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 582.331418] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.331616] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.331879] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.332017] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.332205] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.332356] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.332562] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.332721] env[61898]: 
DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.332885] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.333231] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.333412] env[61898]: DEBUG nova.virt.hardware [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.334833] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e870865d-e560-400b-86c4-1f397af039ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.343211] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0eb4b0-9e33-4c5b-8c38-4d6f4470f17c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.360319] env[61898]: DEBUG nova.compute.utils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 582.364298] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 582.364511] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.461025] env[61898]: DEBUG nova.policy [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fba3c68fac24de18fbebaadfa0b93a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46fbee4035b847ef8b8150edf2bd6aeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 582.480175] env[61898]: DEBUG nova.compute.manager [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Received event network-changed-c6001d62-fb77-48d7-8563-7f6acad85cb8 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 582.480451] env[61898]: DEBUG nova.compute.manager [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Refreshing instance network info cache due to event network-changed-c6001d62-fb77-48d7-8563-7f6acad85cb8. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 582.480675] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] Acquiring lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.480815] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] Acquired lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.480967] env[61898]: DEBUG nova.network.neutron [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Refreshing network info cache for port c6001d62-fb77-48d7-8563-7f6acad85cb8 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 582.542722] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240352, 'name': ReconfigVM_Task, 'duration_secs': 0.271859} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.543081] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 582.544594] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d3507f7-9c5f-4adc-83d3-2dc57f5a5c17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.553854] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 582.553854] env[61898]: value = "task-1240353" [ 582.553854] env[61898]: _type = "Task" [ 582.553854] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.564939] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240353, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.617245] env[61898]: ERROR nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. 
[ 582.617245] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 582.617245] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 582.617245] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 582.617245] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.617245] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.617245] env[61898]: ERROR nova.compute.manager raise self.value [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 582.617245] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 582.617245] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.617245] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 582.617639] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.617639] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 582.617639] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. 
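Both copies of this traceback end in the oslo.utils frames __exit__ -> force_reraise() -> raise self.value. That is the save_and_reraise_exception() pattern: run cleanup for any ports that were created, then re-raise the original PortBindingFailed unchanged. A self-contained illustration (not Nova's code; the helpers are stand-ins):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def _update_port(port_id):
        # Stand-in for the Neutron port update that detects a failed binding.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                _update_port(port_id)
                created.append(port_id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs while the original exception is saved;
                    # leaving the block re-raises it, which is why
                    # force_reraise()/raise self.value appear in the trace.
                    created.clear()

    try:
        update_ports_for_instance(["c6001d62-fb77-48d7-8563-7f6acad85cb8"])
    except PortBindingFailed as exc:
        print("caught:", exc)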
[ 582.617639] env[61898]: ERROR nova.compute.manager [ 582.617639] env[61898]: Traceback (most recent call last): [ 582.617639] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 582.617639] env[61898]: listener.cb(fileno) [ 582.617639] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.617639] env[61898]: result = function(*args, **kwargs) [ 582.617639] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.617639] env[61898]: return func(*args, **kwargs) [ 582.617639] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 582.617639] env[61898]: raise e [ 582.617639] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 582.617639] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 582.617639] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 582.617639] env[61898]: created_port_ids = self._update_ports_for_instance( [ 582.617639] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 582.617639] env[61898]: with excutils.save_and_reraise_exception(): [ 582.617639] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.617639] env[61898]: self.force_reraise() [ 582.617639] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.617639] env[61898]: raise self.value [ 582.617639] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 582.617639] env[61898]: updated_port = self._update_port( [ 582.617639] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.617639] env[61898]: _ensure_no_port_binding_failure(port) [ 582.617639] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.617639] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 582.618236] env[61898]: nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. [ 582.618236] env[61898]: Removing descriptor: 19 [ 582.618279] env[61898]: ERROR nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. 
[ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Traceback (most recent call last): [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] yield resources [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.driver.spawn(context, instance, image_meta, [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self._vmops.spawn(context, instance, image_meta, injected_files, [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] vm_ref = self.build_virtual_machine(instance, [ 582.618279] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] vif_infos = vmwarevif.get_vif_info(self._session, [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] for vif in network_info: [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self._sync_wrapper(fn, *args, **kwargs) [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.wait() [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self[:] = self._gt.wait() [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self._exit_event.wait() [ 582.618500] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 582.618500] env[61898]: ERROR 
nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] result = hub.switch() [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self.greenlet.switch() [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] result = function(*args, **kwargs) [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return func(*args, **kwargs) [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise e [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] nwinfo = self.network_api.allocate_for_instance( [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] created_port_ids = self._update_ports_for_instance( [ 582.618757] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] with excutils.save_and_reraise_exception(): [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.force_reraise() [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise self.value [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] updated_port = self._update_port( [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.619027] 
env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] _ensure_no_port_binding_failure(port) [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise exception.PortBindingFailed(port_id=port['id']) [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. [ 582.619027] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] [ 582.619318] env[61898]: INFO nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Terminating instance [ 582.856873] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Successfully created port: 4bb4e208-9c5a-4d86-974a-3c49a938ab12 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.869024] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 583.008098] env[61898]: DEBUG nova.network.neutron [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 583.064748] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240353, 'name': Rename_Task, 'duration_secs': 0.135733} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.068389] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 583.068389] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fd4d3ad-47ba-467e-a756-56db1d3a7f5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.079207] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 583.079207] env[61898]: value = "task-1240355" [ 583.079207] env[61898]: _type = "Task" [ 583.079207] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.091540] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240355, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.128244] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.182576] env[61898]: DEBUG nova.network.neutron [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.342183] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ed93f3-cd7a-4a59-8df1-20d42846a192 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.351217] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702fa1a5-c07f-4cdb-a935-88b86dd3d271 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.389160] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33df910-7d82-4a66-9089-351615547d1d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.400020] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af5b6f1-cb02-4b51-91c5-a170268fdb85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.418259] env[61898]: DEBUG nova.compute.provider_tree [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.594020] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240355, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.685754] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ca8073a-016d-4045-8472-11b5f42c08c9 req-88f296c7-24dc-4d7f-a2bb-b429485b478c service nova] Releasing lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.686094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquired lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.686315] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 583.822234] env[61898]: ERROR nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 583.822234] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.822234] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.822234] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.822234] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.822234] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.822234] env[61898]: ERROR nova.compute.manager raise self.value [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.822234] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 583.822234] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.822234] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 583.822647] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.822647] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 583.822647] 
env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 583.822647] env[61898]: ERROR nova.compute.manager [ 583.822647] env[61898]: Traceback (most recent call last): [ 583.822647] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 583.822647] env[61898]: listener.cb(fileno) [ 583.822647] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.822647] env[61898]: result = function(*args, **kwargs) [ 583.822647] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.822647] env[61898]: return func(*args, **kwargs) [ 583.822647] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.822647] env[61898]: raise e [ 583.822647] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.822647] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 583.822647] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.822647] env[61898]: created_port_ids = self._update_ports_for_instance( [ 583.822647] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.822647] env[61898]: with excutils.save_and_reraise_exception(): [ 583.822647] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.822647] env[61898]: self.force_reraise() [ 583.822647] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.822647] env[61898]: raise self.value [ 583.822647] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.822647] env[61898]: updated_port = self._update_port( [ 583.822647] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.822647] env[61898]: _ensure_no_port_binding_failure(port) [ 583.822647] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.822647] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 583.823308] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 583.823308] env[61898]: Removing descriptor: 20 [ 583.892362] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 583.896566] env[61898]: DEBUG nova.compute.manager [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Received event network-changed-4bb4e208-9c5a-4d86-974a-3c49a938ab12 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 583.896761] env[61898]: DEBUG nova.compute.manager [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Refreshing instance network info cache due to event network-changed-4bb4e208-9c5a-4d86-974a-3c49a938ab12. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 583.897794] env[61898]: DEBUG oslo_concurrency.lockutils [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] Acquiring lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.897794] env[61898]: DEBUG oslo_concurrency.lockutils [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] Acquired lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.897794] env[61898]: DEBUG nova.network.neutron [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Refreshing network info cache for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.923845] env[61898]: DEBUG nova.scheduler.client.report [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 583.932033] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 583.932033] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 583.932033] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 583.932247] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 583.932247] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 583.932247] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 583.932247] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 583.932247] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 583.932367] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 583.932367] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 583.932367] env[61898]: DEBUG nova.virt.hardware [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 583.933439] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28389fc-4a9e-4f48-b21c-43ef41e733d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.946555] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42030b5-5185-4e92-b38c-b226f21069a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.963058] env[61898]: ERROR nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Traceback (most recent call last): [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] yield resources [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.driver.spawn(context, instance, image_meta, [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] vm_ref = self.build_virtual_machine(instance, [ 583.963058] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] for vif in network_info: [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 
31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return self._sync_wrapper(fn, *args, **kwargs) [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.wait() [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self[:] = self._gt.wait() [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return self._exit_event.wait() [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 583.963513] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] current.throw(*self._exc) [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] result = function(*args, **kwargs) [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return func(*args, **kwargs) [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise e [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] nwinfo = self.network_api.allocate_for_instance( [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] created_port_ids = self._update_ports_for_instance( [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] with excutils.save_and_reraise_exception(): [ 583.964064] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.force_reraise() [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise self.value [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] updated_port = self._update_port( [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] _ensure_no_port_binding_failure(port) [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise exception.PortBindingFailed(port_id=port['id']) [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 583.964512] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] [ 583.964512] env[61898]: INFO nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Terminating instance [ 584.074371] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquiring lock "03ba4dad-5c58-4582-a36e-95de69b37474" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.074670] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "03ba4dad-5c58-4582-a36e-95de69b37474" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.091836] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240355, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.207480] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.301309] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.417611] env[61898]: DEBUG nova.network.neutron [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.439146] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.439638] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 584.442153] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.082s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.466967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.480717] env[61898]: DEBUG nova.network.neutron [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.550242] env[61898]: DEBUG nova.compute.manager [req-b34c2e28-40ab-4164-bdc4-56bdcf721442 req-5658e6be-e8bc-4fec-a5d6-e8d9a1276b13 service nova] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Received event network-vif-deleted-c6001d62-fb77-48d7-8563-7f6acad85cb8 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 584.596277] env[61898]: DEBUG oslo_vmware.api [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240355, 'name': PowerOnVM_Task, 'duration_secs': 1.275766} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.596560] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 584.596769] env[61898]: INFO nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Took 5.83 seconds to spawn the instance on the hypervisor. 
[ 584.596937] env[61898]: DEBUG nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 584.598457] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e2fc91-df7d-43c8-a475-b9277202ab61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.804999] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Releasing lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.805458] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 584.805656] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.805953] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07fc5f94-9d5f-440d-9de4-cf301ec3b020 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.816410] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242c7227-de74-42e1-b9e7-42dde2f6b3de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.840742] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cf94c3f2-a4db-479f-8251-f2e403697678 could not be found. [ 584.840986] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 584.841193] env[61898]: INFO nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 584.841449] env[61898]: DEBUG oslo.service.loopingcall [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.841692] env[61898]: DEBUG nova.compute.manager [-] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 584.841788] env[61898]: DEBUG nova.network.neutron [-] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 584.858443] env[61898]: DEBUG nova.network.neutron [-] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 584.950446] env[61898]: DEBUG nova.compute.utils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 584.952576] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 584.984610] env[61898]: DEBUG oslo_concurrency.lockutils [req-72db030d-7f9e-4cce-92f9-b84d69aea1c0 req-f29d49d4-2b27-4c66-88b3-1fa1ac7c8157 service nova] Releasing lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.984610] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquired lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.984610] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.116812] env[61898]: INFO nova.compute.manager [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Took 39.84 seconds to build instance. 
[ 585.361868] env[61898]: DEBUG nova.network.neutron [-] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.367576] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a510c5b-532f-4297-a651-582b777ea617 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.376666] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6cfba4-ea16-494c-8f5c-1a2da0ff8221 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.408426] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f6b61a-e091-4d1a-a4d6-f39975bb037a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.416210] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9ecd9f-51a6-41fe-ae4b-06135374b291 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.430331] env[61898]: DEBUG nova.compute.provider_tree [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.454126] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 585.503393] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 585.548990] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.550932] env[61898]: INFO nova.compute.manager [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Rebuilding instance [ 585.587658] env[61898]: DEBUG nova.compute.manager [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 585.588516] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923e7072-6fda-4982-974b-24da6503d242 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.621559] env[61898]: DEBUG oslo_concurrency.lockutils [None req-580c8497-1ea0-48f0-aa4a-c29898d15690 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.299s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.864274] env[61898]: INFO nova.compute.manager [-] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Took 1.02 seconds to deallocate network for instance. 
[ 585.866933] env[61898]: DEBUG nova.compute.claims [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 585.867180] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.919470] env[61898]: DEBUG nova.compute.manager [req-ff13d465-2f98-4266-b062-8aaa634d3b53 req-b3672676-5388-4845-8d9d-b94e1f370c9c service nova] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Received event network-vif-deleted-4bb4e208-9c5a-4d86-974a-3c49a938ab12 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 585.933547] env[61898]: DEBUG nova.scheduler.client.report [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 586.051474] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Releasing lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.051947] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 586.052157] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.052758] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c9768c3a-4c81-4ec4-af8f-60a615b7a3b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.064615] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a44a25-ad1f-481d-ac13-3e1ea1306662 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.089966] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31aa8536-1597-4b61-b069-80daf5306dd6 could not be found. [ 586.090236] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 586.090420] env[61898]: INFO nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 586.090662] env[61898]: DEBUG oslo.service.loopingcall [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.090901] env[61898]: DEBUG nova.compute.manager [-] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 586.090957] env[61898]: DEBUG nova.network.neutron [-] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 586.107449] env[61898]: DEBUG nova.network.neutron [-] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.126196] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 586.438447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.996s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.439354] env[61898]: ERROR nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Traceback (most recent call last): [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.driver.spawn(context, instance, image_meta, [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] vm_ref = self.build_virtual_machine(instance, [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.439354] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] for vif in network_info: [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self._sync_wrapper(fn, *args, **kwargs) [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.wait() [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 586.439688] env[61898]: ERROR 
nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self[:] = self._gt.wait() [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self._exit_event.wait() [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] result = hub.switch() [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 586.439688] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return self.greenlet.switch() [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] result = function(*args, **kwargs) [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] return func(*args, **kwargs) [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise e [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] nwinfo = self.network_api.allocate_for_instance( [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] created_port_ids = self._update_ports_for_instance( [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] with excutils.save_and_reraise_exception(): [ 586.439995] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] self.force_reraise() [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise self.value [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] updated_port = self._update_port( [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] _ensure_no_port_binding_failure(port) [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] raise exception.PortBindingFailed(port_id=port['id']) [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] nova.exception.PortBindingFailed: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. [ 586.440330] env[61898]: ERROR nova.compute.manager [instance: b521cc8c-e214-467f-8399-55f075b9bba3] [ 586.441709] env[61898]: DEBUG nova.compute.utils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.443058] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.425s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.444331] env[61898]: INFO nova.compute.claims [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.448778] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Build of instance b521cc8c-e214-467f-8399-55f075b9bba3 was re-scheduled: Binding failed for port 6398b5f8-2603-4da2-b3ce-44653a7617a8, please check neutron logs for more information. 
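Annotation: the traceback bottoms out in a port-binding check that raises PortBindingFailed once Neutron reports the binding as failed, which is what triggers the re-schedule above. A self-contained sketch of that kind of check; the exception class, the "binding_failed" sentinel and the sample port dict are illustrative stand-ins, not Nova's code:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Surfacing a failed binding early turns a silently unusable port
        # into a build failure that the compute manager can re-schedule.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    port = {"id": "6398b5f8-2603-4da2-b3ce-44653a7617a8",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)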
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 586.449700] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 586.449700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.449700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.449819] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 586.467035] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Start spawning the instance on the hypervisor. 
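Annotation: while cleaning up for the re-schedule, access to the instance's network info cache is serialized with a lock named "refresh_cache-<instance uuid>". A minimal per-resource named-lock sketch using plain threading (not oslo.concurrency; helper names are illustrative):

    import threading

    _locks = {}
    _locks_guard = threading.Lock()

    def _named_lock(name):
        """Return the lock object associated with `name`, creating it once."""
        with _locks_guard:
            return _locks.setdefault(name, threading.Lock())

    def with_named_lock(name, fn):
        """Run fn() while holding the per-resource lock, mirroring the
        "refresh_cache-<uuid>" lock taken around the network info cache."""
        with _named_lock(name):
            return fn()

    instance_uuid = "b521cc8c-e214-467f-8399-55f075b9bba3"
    print(with_named_lock(f"refresh_cache-{instance_uuid}",
                          lambda: "network info cache rebuilt"))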
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 586.494506] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 586.494745] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 586.494899] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.495125] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 586.495341] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.495491] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 586.495695] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 586.496372] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 586.496372] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 
tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 586.496372] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 586.496372] env[61898]: DEBUG nova.virt.hardware [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 586.497992] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c90bb3-c4d9-4748-9103-90774ccef5b1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.507438] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b79a1f5-bec2-49c9-af79-75bbd5e50719 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.522275] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 586.527843] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Creating folder: Project (a076ce4b374a40919a81b28d667abaa1). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 586.528123] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af846fc0-1e24-433d-8802-fcdda26f8fdf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.602009] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Created folder: Project (a076ce4b374a40919a81b28d667abaa1) in parent group-v267550. [ 586.602231] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Creating folder: Instances. Parent ref: group-v267561. 
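Annotation: the hardware checks above start from unset flavor/image limits (0:0:0, capped at 65536 sockets/cores/threads) and reduce one vCPU to the single possible topology of 1 socket, 1 core, 1 thread. A small enumeration sketch in the same spirit; the defaults mirror the logged limits, everything else is illustrative rather than Nova's actual algorithm:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Return (sockets, cores, threads) triples whose product equals vcpus."""
        topologies = []
        for s in range(1, min(max_sockets, vcpus) + 1):
            if vcpus % s:
                continue
            rest = vcpus // s
            for c in range(1, min(max_cores, rest) + 1):
                if rest % c:
                    continue
                t = rest // c
                if t <= max_threads:
                    topologies.append((s, c, t))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the case in this log
    print(possible_topologies(4))   # several candidates, e.g. (1, 2, 2)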
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 586.602475] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e68d755-5140-469b-9d81-c06fa07c4a6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.604619] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.604737] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9e69df7-91d4-4e2b-adc1-580b3945d8b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.608292] env[61898]: DEBUG nova.network.neutron [-] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.611117] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 586.611117] env[61898]: value = "task-1240359" [ 586.611117] env[61898]: _type = "Task" [ 586.611117] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.615728] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Created folder: Instances in parent group-v267561. [ 586.616138] env[61898]: DEBUG oslo.service.loopingcall [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 586.616319] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 586.619586] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3891bac9-be29-40e8-af5e-a2c9a5cc6b60 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.631782] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.640909] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 586.640909] env[61898]: value = "task-1240360" [ 586.640909] env[61898]: _type = "Task" [ 586.640909] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.648395] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240360, 'name': CreateVM_Task} progress is 0%. 
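Annotation: both the power-off and the CreateVM calls come back as task handles that are polled until they report completion ("progress is 0%" ... "completed successfully"). A generic polling sketch under the assumption of a `fetch_task_state` callable returning "running", "success" or "error"; this is not the oslo.vmware API:

    import time

    def wait_for_task(fetch_task_state, interval_s=0.5, timeout_s=60.0):
        """Poll a task until it finishes, raising on error or timeout."""
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            state = fetch_task_state()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval_s)   # task still running; poll again
        raise TimeoutError("task did not complete in time")

    # Example: a fake task that finishes after three polls.
    states = iter(["running", "running", "success"])
    wait_for_task(lambda: next(states), interval_s=0.01)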
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.650806] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.968256] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.046978] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.111257] env[61898]: INFO nova.compute.manager [-] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Took 1.02 seconds to deallocate network for instance. [ 587.118385] env[61898]: DEBUG nova.compute.claims [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 587.119038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.124594] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240359, 'name': PowerOffVM_Task, 'duration_secs': 0.122246} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.124783] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 587.125015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 587.125791] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa98c73-b064-4c7f-9c55-eea04462fb28 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.133535] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 587.133779] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0c13ae0-edcd-438e-b0f7-e121bdb06296 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.149792] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240360, 'name': CreateVM_Task, 'duration_secs': 0.26226} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.149937] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 587.150819] env[61898]: DEBUG oslo_vmware.service [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a27cf7-bd21-42d9-a6a4-d4321a7f3b84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.158278] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.158378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.158750] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 587.159273] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-359fbffd-5cb2-4492-b9b9-4daaa5a7b854 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.165630] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 587.165630] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cd16e6-b78e-f801-66c6-81be306be901" [ 587.165630] env[61898]: _type = "Task" [ 587.165630] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.173657] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 587.173904] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 587.174098] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleting the datastore file [datastore2] 0dfabd80-a385-4124-af33-083559819d7a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.174382] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6959a9bb-2515-465b-bead-cb32b0f8514d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.180089] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cd16e6-b78e-f801-66c6-81be306be901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.187268] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 587.187268] env[61898]: value = "task-1240363" [ 587.187268] env[61898]: _type = "Task" [ 587.187268] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.198205] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240363, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.551694] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-b521cc8c-e214-467f-8399-55f075b9bba3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.551935] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
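Annotation: the destroy path in these entries runs in a fixed order: power the VM off, unregister it from the inventory, then delete its directory from the datastore. A compressed sketch of that ordering with hypothetical callables standing in for the real API calls:

    def destroy_instance(power_off, unregister, delete_files):
        """Tear down a VM in the order the log shows: power off first so the
        files are no longer in use, unregister so the inventory entry goes
        away, then remove what is left on the datastore."""
        power_off()
        unregister()
        # File deletion runs last; if it fails, the VM is already gone from
        # the inventory and a later cleanup pass can finish the job.
        delete_files()

    destroy_instance(lambda: print("powered off"),
                     lambda: print("unregistered"),
                     lambda: print("datastore files deleted"))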
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 587.552126] env[61898]: DEBUG nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 587.552294] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.570618] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 587.679445] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.679847] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 587.680169] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.680391] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.680646] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 587.681079] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f863649-a5a5-4537-9fba-61c54ff4a632 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.693796] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 
tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 587.693929] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 587.697557] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30157379-56a3-4571-81de-bd1f781e7655 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.700485] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240363, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124638} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.703203] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 587.703823] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 587.703823] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 587.711077] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e641d70-1e94-4470-915e-99d0b6f47aa5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.718742] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 587.718742] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52624699-f2c9-51e2-7a7c-d52e724a58f5" [ 587.718742] env[61898]: _type = "Task" [ 587.718742] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.727497] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52624699-f2c9-51e2-7a7c-d52e724a58f5, 'name': SearchDatastore_Task} progress is 0%. 
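Annotation: the image-cache entries above take a lock named after the cached image, search the datastore for an existing copy, and only download when the copy is missing. A fetch-if-missing sketch of that pattern; `image_exists` plays the role of the datastore search and `fetch` of the download, both illustrative:

    import threading

    _cache_locks = {}
    _guard = threading.Lock()

    def _lock_for(image_id):
        """One lock per cached image: concurrent spawns of the same image
        serialize, while different images can proceed in parallel."""
        with _guard:
            return _cache_locks.setdefault(image_id, threading.Lock())

    def fetch_image_if_missing(image_id, image_exists, fetch):
        """Download the image exactly once; later callers see a cache hit."""
        with _lock_for(image_id):
            if not image_exists(image_id):
                fetch(image_id)

    seen = set()
    fetch_image_if_missing("e07a6c11-ab12-4187-81fc-1a28a9d1e65d",
                           image_exists=lambda i: i in seen,
                           fetch=lambda i: seen.add(i))
    print(seen)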
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.869138] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e4a010-4c1b-495e-9f9f-48bd25d0adb5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.878951] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3569ef9c-1861-4749-b551-1b0f93dd50e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.910467] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0643825f-0729-4b78-a93c-1e31da57bb9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.919012] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b436760-85c6-418d-b645-bf52de8d4d32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.934382] env[61898]: DEBUG nova.compute.provider_tree [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.076405] env[61898]: DEBUG nova.network.neutron [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.234897] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 588.234897] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Creating directory with path [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 588.234897] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11591463-85dc-49aa-bf96-416690834c9f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.248917] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Created directory with path [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 588.249632] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 
tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Fetch image to [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 588.249632] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Downloading image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk on the data store datastore1 {{(pid=61898) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 588.250155] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5c3214-8428-4372-a2fc-26e322f00862 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.259037] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc95ba2a-8375-45b7-9297-60e62eeb068b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.269883] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c940c88-631d-4625-96ca-45f6e9db2ba7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.306275] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1767c756-9def-4163-844a-517da01f6916 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.313795] env[61898]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f6860bf2-3058-4acc-9c71-5155304706f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.415021] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Downloading image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to the data store datastore1 {{(pid=61898) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 588.438021] env[61898]: DEBUG nova.scheduler.client.report [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 588.468741] env[61898]: DEBUG oslo_vmware.rw_handles [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 
tempest-ServersAdmin275Test-565549473-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 588.581268] env[61898]: INFO nova.compute.manager [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: b521cc8c-e214-467f-8399-55f075b9bba3] Took 1.03 seconds to deallocate network for instance. [ 588.732701] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 588.732952] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 588.733124] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 588.733307] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 588.733450] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 588.733594] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 588.733799] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 588.733952] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 588.734127] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 588.734286] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 588.734455] env[61898]: DEBUG nova.virt.hardware [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 588.735331] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7739b34-68bc-4435-9bf8-e178b540cab4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.749499] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caf57fc-e75b-48ae-859d-01a3b07670e2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.766274] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 588.772052] env[61898]: DEBUG oslo.service.loopingcall [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.775561] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 588.775997] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fea3ff32-428e-472e-a9d7-beb5d9138ad1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.794338] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.794338] env[61898]: value = "task-1240364" [ 588.794338] env[61898]: _type = "Task" [ 588.794338] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.803618] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240364, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.946334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.503s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.946989] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 588.950053] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.487s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.951663] env[61898]: INFO nova.compute.claims [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.304253] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240364, 'name': CreateVM_Task, 'duration_secs': 0.302441} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.305594] env[61898]: DEBUG oslo_vmware.rw_handles [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 589.305594] env[61898]: DEBUG oslo_vmware.rw_handles [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
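Annotation: the image bytes reach the datastore over a plain HTTPS file upload: a write connection is opened for a known size (21318656 bytes here), the image iterator is drained into it, and the handle is closed once reading completes. A local sketch of that read-chunks-into-a-write-handle flow; the chunk size and the fake handle are illustrative, and this is not the oslo.vmware rw_handles implementation:

    import io

    CHUNK = 64 * 1024  # transfer unit; the real handler uses its own size

    def stream_copy(read_fh, write_handle, total_size):
        """Copy exactly total_size bytes from read_fh to write_handle in
        fixed-size chunks, the way an image iterator feeds a write handle."""
        remaining = total_size
        while remaining > 0:
            chunk = read_fh.read(min(CHUNK, remaining))
            if not chunk:
                raise IOError("image data ended before the declared size")
            write_handle.write(chunk)
            remaining -= len(chunk)
        write_handle.close()   # closing flushes the upload, as in the log

    class FakeWriteHandle:
        """Stands in for the datastore write handle: collects bytes written."""
        def __init__(self):
            self.buf = bytearray()
            self.closed = False
        def write(self, data):
            self.buf.extend(data)
        def close(self):
            self.closed = True

    src = io.BytesIO(b"x" * 1000)   # stands in for the image data iterator
    dst = FakeWriteHandle()
    stream_copy(src, dst, 1000)
    print(len(dst.buf), dst.closed)   # 1000 True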
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 589.305748] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 589.306343] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.306498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.306802] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 589.307067] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddf34dce-68d1-4d4c-b066-d1141c073068 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.312498] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 589.312498] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bf214d-5017-2b61-2b63-070e02fdbb73" [ 589.312498] env[61898]: _type = "Task" [ 589.312498] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.328704] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.328977] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.329193] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.368486] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Downloaded image file data e07a6c11-ab12-4187-81fc-1a28a9d1e65d to vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk on the data store datastore1 {{(pid=61898) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 589.370757] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 589.370996] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copying Virtual Disk [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk to [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 589.371295] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-187ab1bd-523d-4200-b7e9-3158ac6ab818 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.380384] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 589.380384] env[61898]: value = "task-1240366" [ 589.380384] env[61898]: _type = "Task" [ 589.380384] env[61898]: } to complete. 
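Annotation: the caching step downloads into a per-request vmware_temp directory as tmp-sparse.vmdk, copies it to the final cache name, and (a few lines below) deletes the temporary file, so concurrent spawns never observe a half-written cache entry. A local-filesystem sketch of that write-temp-then-publish idea; the paths are illustrative and the real flow copies between datastore paths rather than local files:

    import os
    import shutil
    import tempfile

    def cache_via_temp(final_path, write_payload):
        """Write into a scratch file, copy it to the cache name, then remove
        the scratch area; readers of final_path only ever see complete data."""
        tmp_dir = tempfile.mkdtemp(prefix="vmware_temp-")
        tmp_path = os.path.join(tmp_dir, "tmp-sparse.vmdk")
        try:
            write_payload(tmp_path)                   # the download step
            shutil.copyfile(tmp_path, final_path)     # publish the cached copy
        finally:
            shutil.rmtree(tmp_dir, ignore_errors=True)  # drop the scratch area

    target = os.path.join(tempfile.mkdtemp(), "e07a6c11.vmdk")
    cache_via_temp(target, lambda p: open(p, "wb").close())
    print(os.path.exists(target))   # True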
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.390439] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240366, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.456098] env[61898]: DEBUG nova.compute.utils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.459745] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 589.459926] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 589.526919] env[61898]: DEBUG nova.policy [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c79292a29dc44dd2a6ef93a86635fbf0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bef361cc3444e2fa89ab65279503995', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 589.626994] env[61898]: INFO nova.scheduler.client.report [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance b521cc8c-e214-467f-8399-55f075b9bba3 [ 589.891861] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240366, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.904230] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Successfully created port: 3e48fdd5-5b31-4b00-adbd-84783f49950c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 589.961271] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 590.136631] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2439c877-5ec3-4c6b-a1d9-3242fae9e358 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "b521cc8c-e214-467f-8399-55f075b9bba3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.047s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.391644] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240366, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.810877} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.394219] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copied Virtual Disk [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk to [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 590.394413] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleting the datastore file [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/tmp-sparse.vmdk {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 590.394891] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cc4a20a-5e0d-437e-a565-50928fccf008 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.402458] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 590.402458] env[61898]: value = "task-1240367" [ 590.402458] env[61898]: _type = "Task" [ 590.402458] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.407090] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15a03e3-9b41-45c0-bbb3-5ce277445428 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.418097] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65de67e2-d556-455b-ba66-6b8586f9e438 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.421375] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240367, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.451011] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50501f63-b5ba-406b-b2fd-96aa13965c95 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.459155] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b515950b-df50-44c9-850b-a924bd500802 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.476845] env[61898]: DEBUG nova.compute.provider_tree [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.644488] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 590.649284] env[61898]: DEBUG nova.compute.manager [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Received event network-changed-3e48fdd5-5b31-4b00-adbd-84783f49950c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 590.649500] env[61898]: DEBUG nova.compute.manager [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Refreshing instance network info cache due to event network-changed-3e48fdd5-5b31-4b00-adbd-84783f49950c. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 590.649739] env[61898]: DEBUG oslo_concurrency.lockutils [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] Acquiring lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.649960] env[61898]: DEBUG oslo_concurrency.lockutils [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] Acquired lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.650252] env[61898]: DEBUG nova.network.neutron [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Refreshing network info cache for port 3e48fdd5-5b31-4b00-adbd-84783f49950c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 590.816039] env[61898]: ERROR nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 590.816039] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.816039] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.816039] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.816039] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.816039] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.816039] env[61898]: ERROR nova.compute.manager raise self.value [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.816039] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 590.816039] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.816039] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 590.816528] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.816528] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 590.816528] env[61898]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 590.816528] env[61898]: ERROR nova.compute.manager [ 590.816528] env[61898]: Traceback (most recent call last): [ 590.816528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 590.816528] env[61898]: listener.cb(fileno) [ 590.816528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 590.816528] env[61898]: result = function(*args, **kwargs) [ 590.816528] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 590.816528] env[61898]: return func(*args, **kwargs) [ 590.816528] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 590.816528] env[61898]: raise e [ 590.816528] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 590.816528] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 590.816528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 590.816528] env[61898]: created_port_ids = self._update_ports_for_instance( [ 590.816528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 590.816528] env[61898]: with excutils.save_and_reraise_exception(): [ 590.816528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 590.816528] env[61898]: self.force_reraise() [ 590.816528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 590.816528] env[61898]: raise self.value [ 590.816528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 590.816528] env[61898]: updated_port = self._update_port( [ 590.816528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 590.816528] env[61898]: _ensure_no_port_binding_failure(port) [ 590.816528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 590.816528] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 590.817254] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 590.817254] env[61898]: Removing descriptor: 20 [ 590.913613] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02947} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.913780] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 590.914009] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Moving file from [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb/e07a6c11-ab12-4187-81fc-1a28a9d1e65d to [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d. {{(pid=61898) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 590.914283] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-807f0627-a3ea-43a4-8ef2-5e21a27508c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.922183] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 590.922183] env[61898]: value = "task-1240368" [ 590.922183] env[61898]: _type = "Task" [ 590.922183] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.931254] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240368, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.980981] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 590.983272] env[61898]: DEBUG nova.scheduler.client.report [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 591.007396] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.007632] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.007872] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.008078] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.008228] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.008372] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 591.008575] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.008729] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.008890] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.009064] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.009237] env[61898]: DEBUG nova.virt.hardware [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.011707] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddad65c3-ae5c-4bca-980a-74c6ed502cfc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.019113] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df62592-0ef1-4b28-8067-6915d85fd3ec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.033179] env[61898]: ERROR nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. 
[ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Traceback (most recent call last): [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] yield resources [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.driver.spawn(context, instance, image_meta, [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] vm_ref = self.build_virtual_machine(instance, [ 591.033179] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] vif_infos = vmwarevif.get_vif_info(self._session, [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] for vif in network_info: [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return self._sync_wrapper(fn, *args, **kwargs) [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.wait() [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self[:] = self._gt.wait() [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return self._exit_event.wait() [ 591.033592] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 591.033592] env[61898]: ERROR 
nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] current.throw(*self._exc) [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] result = function(*args, **kwargs) [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return func(*args, **kwargs) [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise e [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] nwinfo = self.network_api.allocate_for_instance( [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] created_port_ids = self._update_ports_for_instance( [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] with excutils.save_and_reraise_exception(): [ 591.033907] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.force_reraise() [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise self.value [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] updated_port = self._update_port( [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] _ensure_no_port_binding_failure(port) [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise exception.PortBindingFailed(port_id=port['id']) [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 591.034230] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] [ 591.034230] env[61898]: INFO nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Terminating instance [ 591.177032] env[61898]: DEBUG nova.network.neutron [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 591.179014] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.232062] env[61898]: DEBUG nova.network.neutron [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.433758] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240368, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027823} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.434033] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] File moved {{(pid=61898) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 591.434235] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Cleaning up location [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 591.434364] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleting the datastore file [datastore1] vmware_temp/e6566a31-38fd-45fc-ac03-66e89fa8cceb {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 591.434614] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ead66bfd-ad3a-4c92-8783-506de8ed2a36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.442130] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 591.442130] env[61898]: value = "task-1240370" [ 591.442130] env[61898]: _type = "Task" [ 591.442130] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.450665] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.488708] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.489323] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 591.491908] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.639s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.539418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquiring lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.735194] env[61898]: DEBUG oslo_concurrency.lockutils [req-9693e720-043c-435d-9af8-736397589515 req-24b706d5-44b1-4cc3-8579-27913e7a30b0 service nova] Releasing lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.735615] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquired lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.735810] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 591.925680] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "45138019-b69e-459b-99cf-47a47aa58e40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.925680] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "45138019-b69e-459b-99cf-47a47aa58e40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.955026] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030522} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.955026] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 591.955026] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3125f392-ec43-45eb-818c-3dd7475b0b44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.959204] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 591.959204] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52689a41-6bfa-94ca-59c3-9b649e910fd4" [ 591.959204] env[61898]: _type = "Task" [ 591.959204] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.967543] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52689a41-6bfa-94ca-59c3-9b649e910fd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.996320] env[61898]: DEBUG nova.compute.utils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.001806] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 592.001806] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 592.040530] env[61898]: DEBUG nova.policy [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6142b6dd97fa48199f3edeb8ceef8d70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b549768af40c4edbb845f0e1f27ab52c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 592.257964] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.317769] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Successfully created port: f1a03b78-20f3-4440-a998-e94de3baca8a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 592.319792] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.396373] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5285fb7c-66c6-42dc-b71e-811f37e5a706 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.404850] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdb7dff-caa1-448f-a2a9-0e6e02b5293b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.439479] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae65feae-a3c2-44eb-a1ae-f8b03d0e2a19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.448193] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f1be2f-f76e-4a0f-82a2-a755d7881974 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.463323] env[61898]: DEBUG nova.compute.provider_tree [None 
req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.473903] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52689a41-6bfa-94ca-59c3-9b649e910fd4, 'name': SearchDatastore_Task, 'duration_secs': 0.009292} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.474786] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.475094] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.475404] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.475649] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.477705] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-971c84b5-b00a-43f7-90fc-79150634d061 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.478320] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-594d2d44-170a-47b9-a48b-e5f72d5937a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.487204] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 592.487204] env[61898]: value = "task-1240371" [ 592.487204] env[61898]: _type = "Task" [ 592.487204] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.488291] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.488462] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.492572] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21be3423-00ac-4155-a3b7-ec33359be760 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.501792] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 592.501792] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5208219f-e767-2764-7e61-5fad15f0da7a" [ 592.501792] env[61898]: _type = "Task" [ 592.501792] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.502434] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 592.505022] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240371, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.514971] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5208219f-e767-2764-7e61-5fad15f0da7a, 'name': SearchDatastore_Task, 'duration_secs': 0.00859} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.516530] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-430c5609-3c78-4ae2-9c00-898e66cde7d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.524060] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 592.524060] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524ffe99-2065-c7ce-9550-c10e4a47e76e" [ 592.524060] env[61898]: _type = "Task" [ 592.524060] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.533941] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524ffe99-2065-c7ce-9550-c10e4a47e76e, 'name': SearchDatastore_Task, 'duration_secs': 0.008631} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.534254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.534582] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.534872] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-424931d0-784b-4f44-bcd3-02b59edca11d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.542979] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 592.542979] env[61898]: value = "task-1240372" [ 592.542979] env[61898]: _type = "Task" [ 592.542979] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.553008] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.751827] env[61898]: DEBUG nova.compute.manager [req-d176696b-2935-4d8d-a852-0806eda449e7 req-43d01698-5fa7-412a-a716-58d7914e5ac1 service nova] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Received event network-vif-deleted-3e48fdd5-5b31-4b00-adbd-84783f49950c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 592.825339] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Releasing lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.825973] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 592.828112] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 592.828112] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-501d7c23-4205-4c77-9faa-754f698bb881 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.840172] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c09b8f-5dab-4ea4-911a-06742102ddfe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.870231] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0 could not be found. [ 592.871093] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 592.871093] env[61898]: INFO nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 592.871326] env[61898]: DEBUG oslo.service.loopingcall [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 592.871411] env[61898]: DEBUG nova.compute.manager [-] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 592.871506] env[61898]: DEBUG nova.network.neutron [-] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 592.887502] env[61898]: DEBUG nova.network.neutron [-] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 592.969103] env[61898]: DEBUG nova.scheduler.client.report [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 592.999351] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457099} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.999716] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 592.999972] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 593.000335] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-23d31c39-6347-4dd4-9d37-ad1833b559be {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.014986] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 593.014986] env[61898]: value = "task-1240373" [ 593.014986] env[61898]: _type = "Task" [ 593.014986] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.027032] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240373, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.056064] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240372, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.240570] env[61898]: ERROR nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. [ 593.240570] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 593.240570] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.240570] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.240570] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.240570] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.240570] env[61898]: ERROR nova.compute.manager raise self.value [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.240570] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 593.240570] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.240570] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 593.241124] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.241124] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 593.241124] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. 
[ 593.241124] env[61898]: ERROR nova.compute.manager [ 593.241124] env[61898]: Traceback (most recent call last): [ 593.241124] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 593.241124] env[61898]: listener.cb(fileno) [ 593.241124] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.241124] env[61898]: result = function(*args, **kwargs) [ 593.241124] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.241124] env[61898]: return func(*args, **kwargs) [ 593.241124] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 593.241124] env[61898]: raise e [ 593.241124] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 593.241124] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 593.241124] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.241124] env[61898]: created_port_ids = self._update_ports_for_instance( [ 593.241124] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.241124] env[61898]: with excutils.save_and_reraise_exception(): [ 593.241124] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.241124] env[61898]: self.force_reraise() [ 593.241124] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.241124] env[61898]: raise self.value [ 593.241124] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.241124] env[61898]: updated_port = self._update_port( [ 593.241124] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.241124] env[61898]: _ensure_no_port_binding_failure(port) [ 593.241124] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.241124] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 593.241916] env[61898]: nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. [ 593.241916] env[61898]: Removing descriptor: 20 [ 593.390811] env[61898]: DEBUG nova.network.neutron [-] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.478030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.985s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.478030] env[61898]: ERROR nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. 
[ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Traceback (most recent call last): [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.driver.spawn(context, instance, image_meta, [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.478030] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] vm_ref = self.build_virtual_machine(instance, [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] for vif in network_info: [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self._sync_wrapper(fn, *args, **kwargs) [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.wait() [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self[:] = self._gt.wait() [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self._exit_event.wait() [ 593.478554] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] result = hub.switch() [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return self.greenlet.switch() [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] result = function(*args, **kwargs) [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] return func(*args, **kwargs) [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise e [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] nwinfo = self.network_api.allocate_for_instance( [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.478886] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] created_port_ids = self._update_ports_for_instance( [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] with excutils.save_and_reraise_exception(): [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] self.force_reraise() [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise self.value [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] updated_port = self._update_port( [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] _ensure_no_port_binding_failure(port) [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 593.479241] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] raise exception.PortBindingFailed(port_id=port['id']) [ 593.479565] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] nova.exception.PortBindingFailed: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. [ 593.479565] env[61898]: ERROR nova.compute.manager [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] [ 593.479565] env[61898]: DEBUG nova.compute.utils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 593.479824] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.098s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.482864] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Build of instance c1c15498-af88-4fcf-9a58-7060502bcaf2 was re-scheduled: Binding failed for port 4e4c93c0-c1e9-4284-b5cf-d5292244187a, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 593.483382] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 593.483625] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquiring lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.483777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Acquired lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.483951] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 593.517698] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 593.530987] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.256593} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.531105] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 593.531861] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e848c8-c278-4b3e-9c6a-61276c0b62ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.553278] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 593.555560] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.555787] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.555939] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.556132] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.556275] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 593.556416] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.556615] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.556759] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.556916] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.557082] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.557248] env[61898]: DEBUG nova.virt.hardware [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.560346] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b955f3df-a630-4aaf-b2c8-7a28e02820ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.574872] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7219bd-7891-4afc-8670-4ba543af6ed8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.583718] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709163} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.586380] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 593.586603] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 593.586895] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 593.586895] env[61898]: value = "task-1240375" [ 593.586895] env[61898]: _type = "Task" [ 593.586895] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.587474] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91c02145-baee-4e76-b432-a887a43e599c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.595110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e350fc-c9c7-4e83-bdbe-459b738371b2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.599981] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 593.599981] env[61898]: value = "task-1240376" [ 593.599981] env[61898]: _type = "Task" [ 593.599981] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.614769] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240375, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.615952] env[61898]: ERROR nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. 
[ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] Traceback (most recent call last): [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] yield resources [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.driver.spawn(context, instance, image_meta, [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] vm_ref = self.build_virtual_machine(instance, [ 593.615952] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] for vif in network_info: [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return self._sync_wrapper(fn, *args, **kwargs) [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.wait() [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self[:] = self._gt.wait() [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return self._exit_event.wait() [ 593.616473] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 593.616473] env[61898]: ERROR 
nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] current.throw(*self._exc) [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] result = function(*args, **kwargs) [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return func(*args, **kwargs) [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise e [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] nwinfo = self.network_api.allocate_for_instance( [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] created_port_ids = self._update_ports_for_instance( [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] with excutils.save_and_reraise_exception(): [ 593.616876] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.force_reraise() [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise self.value [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] updated_port = self._update_port( [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] _ensure_no_port_binding_failure(port) [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise exception.PortBindingFailed(port_id=port['id']) [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. [ 593.617241] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] [ 593.617241] env[61898]: INFO nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Terminating instance [ 593.622952] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240376, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.893404] env[61898]: INFO nova.compute.manager [-] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Took 1.02 seconds to deallocate network for instance. [ 593.895886] env[61898]: DEBUG nova.compute.claims [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 593.895993] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.617266] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.617592] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.617592] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.630961] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240375, 'name': ReconfigVM_Task, 'duration_secs': 0.338977} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.633864] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 594.634493] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240376, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065761} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.635255] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85a03efc-f210-427b-9463-afc90e2d012f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.636851] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.637840] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a587555-b7bd-499a-95f0-018a2316618c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.651626] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 594.662089] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.663774] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a8089f7-33b9-49af-9c4c-d2d08e08cac8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.679294] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 594.679294] env[61898]: value = "task-1240377" [ 594.679294] env[61898]: _type = "Task" [ 594.679294] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.689331] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 594.689331] env[61898]: value = "task-1240378" [ 594.689331] env[61898]: _type = "Task" [ 594.689331] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.692864] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240377, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.708842] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240378, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.743243] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.814830] env[61898]: DEBUG nova.compute.manager [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Received event network-changed-f1a03b78-20f3-4440-a998-e94de3baca8a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 594.814830] env[61898]: DEBUG nova.compute.manager [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Refreshing instance network info cache due to event network-changed-f1a03b78-20f3-4440-a998-e94de3baca8a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 594.815023] env[61898]: DEBUG oslo_concurrency.lockutils [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] Acquiring lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.070212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a948051-ccc0-414b-9d57-11bf572d7ac5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.078215] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916786f6-964a-4864-b7bc-0590a34df3f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.109383] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0807c953-7f59-4423-ab34-7f512bb8ad56 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.116784] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f24fee-e131-4394-a55b-d9100743f8f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.131801] env[61898]: DEBUG nova.compute.provider_tree [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.147781] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.193169] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240377, 'name': Rename_Task, 'duration_secs': 0.141802} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.196247] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 595.196501] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2edb474-4c2b-48a1-8b1c-e61ad9f57ef5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.203392] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240378, 'name': ReconfigVM_Task, 'duration_secs': 0.295116} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.204655] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 0dfabd80-a385-4124-af33-083559819d7a/0dfabd80-a385-4124-af33-083559819d7a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 595.205331] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 595.205331] env[61898]: value = "task-1240379" [ 595.205331] env[61898]: _type = "Task" [ 595.205331] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.205517] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-560a371c-3eba-4fba-a227-6dc61440a166 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.215766] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.217380] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 595.217380] env[61898]: value = "task-1240380" [ 595.217380] env[61898]: _type = "Task" [ 595.217380] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.221549] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.225697] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240380, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.246017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Releasing lock "refresh_cache-c1c15498-af88-4fcf-9a58-7060502bcaf2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.246315] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 595.246509] env[61898]: DEBUG nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 595.246714] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 595.261734] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.635055] env[61898]: DEBUG nova.scheduler.client.report [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 595.717517] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240379, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.725629] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240380, 'name': Rename_Task, 'duration_secs': 0.134878} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.725933] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 595.726384] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.726749] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 595.726928] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 595.727165] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fab16335-bce2-4176-a772-29db122193b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.728667] env[61898]: DEBUG oslo_concurrency.lockutils [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] Acquired lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.728815] env[61898]: DEBUG nova.network.neutron [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Refreshing network info cache for port f1a03b78-20f3-4440-a998-e94de3baca8a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 595.729947] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7dfe0620-3aad-4cdd-a74f-b2de167edda4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.741295] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b840a27-2156-4a80-8578-1c56e8bcc5d3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.751544] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 595.751544] env[61898]: value = "task-1240381" [ 595.751544] env[61898]: _type = "Task" [ 595.751544] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.760893] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240381, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.766229] env[61898]: DEBUG nova.network.neutron [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.768031] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d74776d1-f374-4761-976c-f073b3821f42 could not be found. [ 595.768244] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 595.768428] env[61898]: INFO nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Took 0.04 seconds to destroy the instance on the hypervisor. [ 595.768666] env[61898]: DEBUG oslo.service.loopingcall [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 595.769134] env[61898]: DEBUG nova.compute.manager [-] [instance: d74776d1-f374-4761-976c-f073b3821f42] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 595.769228] env[61898]: DEBUG nova.network.neutron [-] [instance: d74776d1-f374-4761-976c-f073b3821f42] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 595.784833] env[61898]: DEBUG nova.network.neutron [-] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.142671] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.660s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.142671] env[61898]: ERROR nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Traceback (most recent call last): [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.driver.spawn(context, instance, image_meta, [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self._vmops.spawn(context, instance, image_meta, injected_files, [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 596.142671] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] vm_ref = self.build_virtual_machine(instance, [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] vif_infos = vmwarevif.get_vif_info(self._session, [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] for vif in network_info: [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self._sync_wrapper(fn, *args, **kwargs) [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.wait() [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 596.143161] env[61898]: ERROR 
nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self[:] = self._gt.wait() [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self._exit_event.wait() [ 596.143161] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] result = hub.switch() [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return self.greenlet.switch() [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] result = function(*args, **kwargs) [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] return func(*args, **kwargs) [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise e [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] nwinfo = self.network_api.allocate_for_instance( [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 596.143525] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] created_port_ids = self._update_ports_for_instance( [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] with excutils.save_and_reraise_exception(): [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] self.force_reraise() [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise self.value [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] updated_port = self._update_port( [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] _ensure_no_port_binding_failure(port) [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 596.143885] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] raise exception.PortBindingFailed(port_id=port['id']) [ 596.144225] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] nova.exception.PortBindingFailed: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. [ 596.144225] env[61898]: ERROR nova.compute.manager [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] [ 596.144225] env[61898]: DEBUG nova.compute.utils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 596.148205] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.604s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.148205] env[61898]: INFO nova.compute.claims [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.150251] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Build of instance 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113 was re-scheduled: Binding failed for port 96f6921e-36a7-4acc-929b-d04b099a7893, please check neutron logs for more information. 
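The traceback above ends with nova raising PortBindingFailed from _ensure_no_port_binding_failure once Neutron reports the port's binding as failed. A minimal sketch of that check, assuming the usual Neutron port dict shape (the 'binding:vif_type' key and the 'binding_failed' sentinel are Neutron API conventions, not values copied from this log):

    # Sketch of the check behind the PortBindingFailed error above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port whose binding could not be completed with
        # the 'binding_failed' vif type; the compute service turns that
        # into an exception instead of continuing to spawn the instance.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '96f6921e-36a7-4acc-929b-d04b099a7893',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # Binding failed for port 96f6921e-..., please check ...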
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 596.151040] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 596.151373] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquiring lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.151627] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Acquired lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.151898] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 596.217747] env[61898]: DEBUG oslo_vmware.api [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240379, 'name': PowerOnVM_Task, 'duration_secs': 0.94968} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.218239] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 596.218442] env[61898]: INFO nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Took 9.75 seconds to spawn the instance on the hypervisor. [ 596.218614] env[61898]: DEBUG nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 596.219462] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f2012e-2771-4afe-b6d5-c3d3feaf8f88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.248945] env[61898]: DEBUG nova.network.neutron [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.262759] env[61898]: DEBUG oslo_vmware.api [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240381, 'name': PowerOnVM_Task, 'duration_secs': 0.406766} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.267113] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 596.267113] env[61898]: DEBUG nova.compute.manager [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 596.267113] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adeed355-4f04-4c2d-b456-7d22bec1d170 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.270038] env[61898]: INFO nova.compute.manager [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] [instance: c1c15498-af88-4fcf-9a58-7060502bcaf2] Took 1.02 seconds to deallocate network for instance. [ 596.287292] env[61898]: DEBUG nova.network.neutron [-] [instance: d74776d1-f374-4761-976c-f073b3821f42] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.341730] env[61898]: DEBUG nova.network.neutron [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.671667] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 596.742337] env[61898]: INFO nova.compute.manager [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Took 42.79 seconds to build instance. 
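The Rename_Task and PowerOnVM_Task records above follow the standard oslo.vmware pattern: invoke an asynchronous vSphere method, then poll the returned task until it succeeds (the "progress is N%" lines come from that polling loop). A rough sketch of the pattern against the public oslo.vmware API; the vCenter host, credentials and retry settings below are placeholders, not values from this deployment.

    # Rough sketch of the invoke-then-wait pattern behind the task records
    # above. Host and credentials are placeholders (assumptions).
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Locate the VM by instance UUID (SearchIndex.FindAllByUuid, as in the
    # log), then power it on and block until the task reaches 'success'.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='0dfabd80-a385-4124-af33-083559819d7a',
        vmSearch=True, instanceUuid=True)
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_refs[0])
    session.wait_for_task(task)  # raises if the task ends in an error state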
[ 596.745922] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.787941] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.789795] env[61898]: INFO nova.compute.manager [-] [instance: d74776d1-f374-4761-976c-f073b3821f42] Took 1.02 seconds to deallocate network for instance. [ 596.791717] env[61898]: DEBUG nova.compute.claims [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 596.791854] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.843620] env[61898]: DEBUG oslo_concurrency.lockutils [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] Releasing lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.844261] env[61898]: DEBUG nova.compute.manager [req-4b7febaf-0056-45e9-ba58-5aeb2fef7b98 req-b7e15e5e-0f50-471a-8008-f7605fa2ca5e service nova] [instance: d74776d1-f374-4761-976c-f073b3821f42] Received event network-vif-deleted-f1a03b78-20f3-4440-a998-e94de3baca8a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 597.244269] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f0b5225d-639a-446d-b4b0-77e630ee1da8 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "7ef91986-fb46-478b-85a5-05d597790ad9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.958s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.246632] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Releasing lock "refresh_cache-3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.246834] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 597.247014] env[61898]: DEBUG nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 597.247185] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.265706] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.304428] env[61898]: INFO nova.scheduler.client.report [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Deleted allocations for instance c1c15498-af88-4fcf-9a58-7060502bcaf2 [ 597.367748] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquiring lock "455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.370015] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.566873] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08bf4730-2d83-4df6-97ad-ae6aa68a314d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.575042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c6bac6b-f971-4e8d-86b1-a4e2f8698742 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.607445] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8db6fa-8859-4644-b67d-e7060c8f31e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.616123] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fc4c45-9bb6-4349-a370-c2cf7f57cf0e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.630202] env[61898]: DEBUG nova.compute.provider_tree [None 
req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.660047] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "0dfabd80-a385-4124-af33-083559819d7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.660307] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.660555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "0dfabd80-a385-4124-af33-083559819d7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.660753] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.660916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.662956] env[61898]: INFO nova.compute.manager [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Terminating instance [ 597.749996] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Starting instance... 
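The 'Acquiring lock', 'acquired by ... :: waited' and '"released" by ... :: held' records above are emitted by oslo.concurrency's synchronized wrapper around methods such as instance_claim, abort_instance_claim and do_terminate_instance. A minimal sketch of that pattern; the lock names and the decorated function are illustrative, not nova's actual code.

    # Minimal sketch of the oslo.concurrency locking pattern that produces
    # the lock records above. Names are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs with the named lock held; concurrent callers block, and the
        # wrapper logs how long they waited and how long the lock was held.
        return 'claimed %s' % instance_uuid

    # The same primitive is also available as a context manager:
    with lockutils.lock('refresh_cache-0dfabd80-a385-4124-af33-083559819d7a'):
        pass

    print(claim_resources('f2f968db-d4e2-451d-afe6-330196eba6c2'))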
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 597.767502] env[61898]: DEBUG nova.network.neutron [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.815860] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac96ecf4-2cdf-475f-8ca7-f4166328cf37 tempest-TenantUsagesTestJSON-419619465 tempest-TenantUsagesTestJSON-419619465-project-member] Lock "c1c15498-af88-4fcf-9a58-7060502bcaf2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.457s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.972979] env[61898]: INFO nova.compute.manager [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Rebuilding instance [ 598.008209] env[61898]: DEBUG nova.compute.manager [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 598.009066] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27fe323-baf4-4ed4-b92e-7ff5a7784623 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.134495] env[61898]: DEBUG nova.scheduler.client.report [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 598.166870] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "refresh_cache-0dfabd80-a385-4124-af33-083559819d7a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.167758] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "refresh_cache-0dfabd80-a385-4124-af33-083559819d7a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.167758] env[61898]: DEBUG nova.network.neutron [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Building network 
info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 598.270028] env[61898]: INFO nova.compute.manager [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] [instance: 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113] Took 1.02 seconds to deallocate network for instance. [ 598.274059] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.318700] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 598.639623] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.640182] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 598.643354] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.776s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.687528] env[61898]: DEBUG nova.network.neutron [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.755679] env[61898]: DEBUG nova.network.neutron [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.838990] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 599.025023] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 599.025023] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a53a70e8-1748-446e-ac5c-db3d256ae419 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.031489] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 599.031489] env[61898]: value = "task-1240382" [ 599.031489] env[61898]: _type = "Task" [ 599.031489] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.042849] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.150244] env[61898]: DEBUG nova.compute.utils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 599.150787] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 599.151126] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.204157] env[61898]: DEBUG nova.policy [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f95bc2826b0746e7b2fe527fd51e3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cd707ca6a4548b99a375d5800dd16fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.260818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "refresh_cache-0dfabd80-a385-4124-af33-083559819d7a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.260818] env[61898]: DEBUG nova.compute.manager [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 599.260932] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 599.264255] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae49ec35-f72f-4dab-a914-5555e2647e25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.278699] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 599.278699] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a3feb57-0b7b-419e-bc63-1746828e8d44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.293019] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 599.293019] env[61898]: value = "task-1240383" [ 599.293019] env[61898]: _type = "Task" [ 599.293019] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.304370] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.305653] env[61898]: INFO nova.scheduler.client.report [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Deleted allocations for instance 3765dd5b-2fd3-4acb-87b8-e5a7d8df4113 [ 599.552323] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240382, 'name': PowerOffVM_Task, 'duration_secs': 0.12991} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.552608] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 599.552838] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 599.553644] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a0056a-fd51-4b28-8425-bf2af06ee412 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.561797] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 599.562364] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65d5ff98-33fc-4021-9dbc-3a4e375c1cb6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.593904] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 599.594195] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 599.594372] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 
tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleting the datastore file [datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 599.594599] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64d92469-4ed4-4cbf-9955-e3bf6c525631 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.603136] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 599.603136] env[61898]: value = "task-1240385" [ 599.603136] env[61898]: _type = "Task" [ 599.603136] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.614991] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.656288] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 599.690482] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6e3481-5282-4b8d-bf34-ce5db7737aec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.711287] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94662d30-4847-4b68-8bfd-6fa54c9b9e52 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.766074] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762a4f6e-eb61-4a87-bddb-28ac3eaf2510 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.776307] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8e5703-a458-44fa-a0e5-a4af0ada05da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.791779] env[61898]: DEBUG nova.compute.provider_tree [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.803129] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240383, 'name': PowerOffVM_Task, 'duration_secs': 0.210195} completed successfully. 
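The destroy path above issues three vSphere calls in order: PowerOffVM_Task, the synchronous UnregisterVM, and FileManager.DeleteDatastoreFile_Task for the instance directory on the datastore. A sketch of that sequence in the same oslo.vmware style as the earlier example; session, vm_ref, dc_ref and the datastore path are assumed to already exist.

    # Sketch of the power-off / unregister / delete-from-datastore sequence
    # shown above. 'session', 'vm_ref', 'dc_ref' and 'ds_path' (e.g.
    # '[datastore1] 7ef91986-fb46-478b-85a5-05d597790ad9') are assumed.
    def destroy_backing(session, vm_ref, dc_ref, ds_path):
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        # UnregisterVM is synchronous: it drops the VM from the inventory
        # but leaves its files on the datastore.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task',
            session.vim.service_content.fileManager,
            name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)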
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.803129] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 599.803129] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 599.803335] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1da5b876-4e8a-4aa3-bca4-4012f7d3413e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.819179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-088eec68-7ddc-4c05-8ac7-08fab5f4d1e7 tempest-ServersTestManualDisk-950551618 tempest-ServersTestManualDisk-950551618-project-member] Lock "3765dd5b-2fd3-4acb-87b8-e5a7d8df4113" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.460s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.823416] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Successfully created port: b9301970-192e-4924-b3dc-377b73f06130 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.998214] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 599.998486] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 599.998676] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleting the datastore file [datastore1] 0dfabd80-a385-4124-af33-083559819d7a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 599.998969] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29655627-bd0b-43c5-9bd3-b8b9ad208093 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.008788] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 600.008788] env[61898]: value = "task-1240387" [ 
600.008788] env[61898]: _type = "Task" [ 600.008788] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.018539] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240387, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.115181] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2093} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.115181] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.115181] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 600.115992] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.294548] env[61898]: DEBUG nova.scheduler.client.report [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 600.321453] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 600.522250] env[61898]: DEBUG oslo_vmware.api [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240387, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20853} completed successfully. 
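The inventory payload repeated above is what placement uses to size provider 79886f75-94e9-4bf0-9cbd-87f3715d3144: usable capacity per resource class is (total - reserved) * allocation_ratio, and max_unit caps any single allocation. A small worked example with the values from the log:

    # Effective capacity implied by the inventory data above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0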
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.522250] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.522495] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 600.522588] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.522750] env[61898]: INFO nova.compute.manager [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Took 1.26 seconds to destroy the instance on the hypervisor. [ 600.522984] env[61898]: DEBUG oslo.service.loopingcall [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.523182] env[61898]: DEBUG nova.compute.manager [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 600.523291] env[61898]: DEBUG nova.network.neutron [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 600.552637] env[61898]: DEBUG nova.network.neutron [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.587131] env[61898]: DEBUG nova.compute.manager [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Received event network-changed-b9301970-192e-4924-b3dc-377b73f06130 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 600.587131] env[61898]: DEBUG nova.compute.manager [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Refreshing instance network info cache due to event network-changed-b9301970-192e-4924-b3dc-377b73f06130. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 600.587131] env[61898]: DEBUG oslo_concurrency.lockutils [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] Acquiring lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.587131] env[61898]: DEBUG oslo_concurrency.lockutils [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] Acquired lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.587131] env[61898]: DEBUG nova.network.neutron [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Refreshing network info cache for port b9301970-192e-4924-b3dc-377b73f06130 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 600.666807] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 600.699449] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.699830] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.700057] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.700569] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.700830] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 
tempest-ServersTestJSON-1225005036-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.701070] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.702019] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.702019] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.702019] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.702419] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.703029] env[61898]: DEBUG nova.virt.hardware [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.703835] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61470f2-e3c5-4b72-879c-2a13a2447906 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.717581] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7eaeef8-7af8-4c86-a99e-e67b7ea40683 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.782511] env[61898]: ERROR nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. 
[ 600.782511] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 600.782511] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.782511] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.782511] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.782511] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.782511] env[61898]: ERROR nova.compute.manager raise self.value [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.782511] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 600.782511] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.782511] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 600.783362] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.783362] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 600.783362] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. 
[ 600.783362] env[61898]: ERROR nova.compute.manager [ 600.783362] env[61898]: Traceback (most recent call last): [ 600.783362] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 600.783362] env[61898]: listener.cb(fileno) [ 600.783362] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.783362] env[61898]: result = function(*args, **kwargs) [ 600.783362] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.783362] env[61898]: return func(*args, **kwargs) [ 600.783362] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 600.783362] env[61898]: raise e [ 600.783362] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 600.783362] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 600.783362] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.783362] env[61898]: created_port_ids = self._update_ports_for_instance( [ 600.783362] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.783362] env[61898]: with excutils.save_and_reraise_exception(): [ 600.783362] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.783362] env[61898]: self.force_reraise() [ 600.783362] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.783362] env[61898]: raise self.value [ 600.783362] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.783362] env[61898]: updated_port = self._update_port( [ 600.783362] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.783362] env[61898]: _ensure_no_port_binding_failure(port) [ 600.783362] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.783362] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 600.784457] env[61898]: nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. [ 600.784457] env[61898]: Removing descriptor: 20 [ 600.784457] env[61898]: ERROR nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. 
[ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Traceback (most recent call last): [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] yield resources [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.driver.spawn(context, instance, image_meta, [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.784457] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] vm_ref = self.build_virtual_machine(instance, [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] for vif in network_info: [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self._sync_wrapper(fn, *args, **kwargs) [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.wait() [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self[:] = self._gt.wait() [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self._exit_event.wait() [ 600.784921] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.785426] env[61898]: ERROR 
nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] result = hub.switch() [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self.greenlet.switch() [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] result = function(*args, **kwargs) [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return func(*args, **kwargs) [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise e [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] nwinfo = self.network_api.allocate_for_instance( [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.785426] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] created_port_ids = self._update_ports_for_instance( [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] with excutils.save_and_reraise_exception(): [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.force_reraise() [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise self.value [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] updated_port = self._update_port( [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.786056] 
env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] _ensure_no_port_binding_failure(port) [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 600.786056] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise exception.PortBindingFailed(port_id=port['id']) [ 600.786509] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. [ 600.786509] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] [ 600.786509] env[61898]: INFO nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Terminating instance [ 600.799245] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.156s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.800035] env[61898]: ERROR nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. 
[ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Traceback (most recent call last): [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.driver.spawn(context, instance, image_meta, [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self._vmops.spawn(context, instance, image_meta, injected_files, [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] vm_ref = self.build_virtual_machine(instance, [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] vif_infos = vmwarevif.get_vif_info(self._session, [ 600.800035] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] for vif in network_info: [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self._sync_wrapper(fn, *args, **kwargs) [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.wait() [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self[:] = self._gt.wait() [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self._exit_event.wait() [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] result = hub.switch() [ 600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
600.800511] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return self.greenlet.switch() [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] result = function(*args, **kwargs) [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] return func(*args, **kwargs) [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise e [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] nwinfo = self.network_api.allocate_for_instance( [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] created_port_ids = self._update_ports_for_instance( [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] with excutils.save_and_reraise_exception(): [ 600.800982] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] self.force_reraise() [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise self.value [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] updated_port = self._update_port( [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] _ensure_no_port_binding_failure(port) [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] raise exception.PortBindingFailed(port_id=port['id']) [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] nova.exception.PortBindingFailed: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. [ 600.801569] env[61898]: ERROR nova.compute.manager [instance: cf94c3f2-a4db-479f-8251-f2e403697678] [ 600.801976] env[61898]: DEBUG nova.compute.utils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 600.802961] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Build of instance cf94c3f2-a4db-479f-8251-f2e403697678 was re-scheduled: Binding failed for port c6001d62-fb77-48d7-8563-7f6acad85cb8, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 600.805075] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 600.805320] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquiring lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.805464] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Acquired lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.805649] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.807760] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.156s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.808053] env[61898]: INFO nova.compute.claims [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 
tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.850246] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.054586] env[61898]: DEBUG nova.network.neutron [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.119062] env[61898]: DEBUG nova.network.neutron [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.160009] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 601.160321] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 601.160566] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 601.160636] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 601.160813] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 601.160889] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 
tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 601.161193] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 601.161433] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 601.161650] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 601.161980] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 601.161980] env[61898]: DEBUG nova.virt.hardware [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 601.163064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1cf5d0-6a65-4f19-8c4b-6e580bbf2e43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.171842] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d650c0c3-a1f8-40c0-890e-378990ba0dfe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.187400] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.193287] env[61898]: DEBUG oslo.service.loopingcall [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.193970] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.193970] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-238b58e4-a980-4d9d-88bb-8da6aff75dc9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.218200] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.218200] env[61898]: value = "task-1240388" [ 601.218200] env[61898]: _type = "Task" [ 601.218200] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.227810] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240388, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.289754] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquiring lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.330432] env[61898]: DEBUG nova.network.neutron [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.335145] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.420525] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.557089] env[61898]: INFO nova.compute.manager [-] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Took 1.03 seconds to deallocate network for instance. [ 601.730867] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240388, 'name': CreateVM_Task, 'duration_secs': 0.327872} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.731111] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 601.731550] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.731754] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.732121] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 601.732407] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f81a48f-a842-4dca-a236-a53d515f9fc0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.738245] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 601.738245] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5224a707-8685-4f34-7966-f696f720d40f" [ 601.738245] env[61898]: _type = "Task" [ 601.738245] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.746533] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224a707-8685-4f34-7966-f696f720d40f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.833253] env[61898]: DEBUG oslo_concurrency.lockutils [req-ec6e7b77-e9e1-41fe-ad2b-21ac461b6d69 req-9dd70b61-c06e-4e4d-9bd0-fe8c59e88a4d service nova] Releasing lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.833642] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquired lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.833818] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.923616] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Releasing lock "refresh_cache-cf94c3f2-a4db-479f-8251-f2e403697678" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.923847] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 601.924034] env[61898]: DEBUG nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 601.924193] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 601.959305] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.064183] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.259044] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224a707-8685-4f34-7966-f696f720d40f, 'name': SearchDatastore_Task, 'duration_secs': 0.009434} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.262472] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.262969] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.263327] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.263590] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.263871] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.265299] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93eb6076-3f9e-448b-82f1-d89d7199f4e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.275951] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
602.276107] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.280084] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7969e84b-ecf6-4955-92bc-ec1150027348 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.286758] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 602.286758] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527e838b-d1f0-3834-88ee-a812e0192ff1" [ 602.286758] env[61898]: _type = "Task" [ 602.286758] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.293145] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1d883d-aa4a-497c-b24a-a5be26ca5aca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.300194] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e838b-d1f0-3834-88ee-a812e0192ff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.305777] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3718ca-ebcc-4c73-9b06-4d0b13fb88e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.344449] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc36e4f-9dcb-4ab4-b5e9-f42bee437806 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.355031] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7c0af3-fe36-4ee0-b3d1-0dad42a33429 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.373224] env[61898]: DEBUG nova.compute.provider_tree [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.379285] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.463660] env[61898]: DEBUG nova.network.neutron [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.537079] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.802060] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e838b-d1f0-3834-88ee-a812e0192ff1, 'name': SearchDatastore_Task, 'duration_secs': 0.00924} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.802897] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5adc594-640a-4800-a742-299ae104d6f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.811737] env[61898]: DEBUG nova.compute.manager [req-9836e525-2bc2-4e76-a9c9-5ddbcccf95bd req-aa75eabd-1548-4252-a98b-5d378853226a service nova] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Received event network-vif-deleted-b9301970-192e-4924-b3dc-377b73f06130 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 602.818188] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 602.818188] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5298e85f-9b55-842e-5c45-957270c12e03" [ 602.818188] env[61898]: _type = "Task" [ 602.818188] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.832340] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5298e85f-9b55-842e-5c45-957270c12e03, 'name': SearchDatastore_Task, 'duration_secs': 0.011275} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.832340] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.832340] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 602.832340] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94f54be4-d41f-46b5-843d-0b0051616766 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.841209] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 602.841209] env[61898]: value = "task-1240389" [ 602.841209] env[61898]: _type = "Task" [ 602.841209] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.848812] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240389, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.881242] env[61898]: DEBUG nova.scheduler.client.report [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 602.971684] env[61898]: INFO nova.compute.manager [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] [instance: cf94c3f2-a4db-479f-8251-f2e403697678] Took 1.05 seconds to deallocate network for instance. 
[ 603.040247] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Releasing lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.040711] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 603.040913] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.041598] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5be9b3f7-db8c-499b-9298-88560d1942ca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.054920] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c73ecc-0f06-4671-b110-902f1a84e563 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.086581] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2f968db-d4e2-451d-afe6-330196eba6c2 could not be found. [ 603.086880] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 603.086991] env[61898]: INFO nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Took 0.05 seconds to destroy the instance on the hypervisor. [ 603.089828] env[61898]: DEBUG oslo.service.loopingcall [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 603.090183] env[61898]: DEBUG nova.compute.manager [-] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 603.090343] env[61898]: DEBUG nova.network.neutron [-] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 603.113522] env[61898]: DEBUG nova.network.neutron [-] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.352058] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240389, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459775} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.354287] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 603.354287] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 603.354287] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef538353-54bb-4686-bc10-d56230e8f077 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.362802] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 603.362802] env[61898]: value = "task-1240390" [ 603.362802] env[61898]: _type = "Task" [ 603.362802] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.373345] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240390, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.385473] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.579s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.386400] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 603.390785] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.272s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.616155] env[61898]: DEBUG nova.network.neutron [-] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.872849] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063519} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.872849] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.875391] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afaac18-7261-4663-8d1c-5506a1af946c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.896688] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.896875] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c729e05-2953-4791-bea3-f5b7e6686623 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.912071] env[61898]: DEBUG nova.compute.utils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.917046] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 603.917224] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.926426] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 603.926426] env[61898]: value = "task-1240391" [ 603.926426] env[61898]: _type = "Task" [ 603.926426] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.935921] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240391, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.008271] env[61898]: INFO nova.scheduler.client.report [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Deleted allocations for instance cf94c3f2-a4db-479f-8251-f2e403697678 [ 604.028159] env[61898]: DEBUG nova.policy [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6142b6dd97fa48199f3edeb8ceef8d70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b549768af40c4edbb845f0e1f27ab52c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 604.118710] env[61898]: INFO nova.compute.manager [-] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Took 1.03 seconds to deallocate network for instance. [ 604.121292] env[61898]: DEBUG nova.compute.claims [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 604.121472] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.417482] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 604.422324] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72ddc83-a692-4e84-bb86-8c9317c341e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.433245] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f79dbb-1847-4d80-8c88-4de2f6877f5a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.439626] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240391, 'name': ReconfigVM_Task, 'duration_secs': 0.28336} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.440271] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.440951] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa50883e-9a12-4183-a926-b189ece24a76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.471137] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00fcde5-27e6-469f-84e1-a84c5ee0d9b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.473862] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 604.473862] env[61898]: value = "task-1240392" [ 604.473862] env[61898]: _type = "Task" [ 604.473862] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.481700] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174bbcc3-3df5-4f6a-895b-033612305df9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.489456] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240392, 'name': Rename_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.500937] env[61898]: DEBUG nova.compute.provider_tree [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.519211] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3595c649-e5b4-498e-a531-a2d694ce1b8f tempest-ServerGroupTestJSON-1605180372 tempest-ServerGroupTestJSON-1605180372-project-member] Lock "cf94c3f2-a4db-479f-8251-f2e403697678" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.115s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.750759] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Successfully created port: f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.984960] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240392, 'name': Rename_Task, 'duration_secs': 0.160224} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.985323] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 604.985852] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4681fdc8-df28-485a-b001-b8dd304a2c9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.992640] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 604.992640] env[61898]: value = "task-1240393" [ 604.992640] env[61898]: _type = "Task" [ 604.992640] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.001770] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240393, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.005167] env[61898]: DEBUG nova.scheduler.client.report [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 605.021525] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 605.436057] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 605.472832] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.473081] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.473242] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.473455] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor pref 0:0:0 {{(pid=61898) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.473686] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.473738] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.473936] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.474119] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.474289] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.474451] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.474620] env[61898]: DEBUG nova.virt.hardware [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.475501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc99366a-f4d2-44ab-af18-1a0022482451 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.484892] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7214df-66a1-40ed-a1f3-36ab3c49cdb5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.512186] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.120s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.512186] env[61898]: 
ERROR nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Traceback (most recent call last): [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.driver.spawn(context, instance, image_meta, [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 605.512186] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] vm_ref = self.build_virtual_machine(instance, [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] vif_infos = vmwarevif.get_vif_info(self._session, [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] for vif in network_info: [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return self._sync_wrapper(fn, *args, **kwargs) [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.wait() [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self[:] = self._gt.wait() [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return self._exit_event.wait() [ 605.512438] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] current.throw(*self._exc) [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] result = function(*args, **kwargs) [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] return func(*args, **kwargs) [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise e [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] nwinfo = self.network_api.allocate_for_instance( [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] created_port_ids = self._update_ports_for_instance( [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 605.512752] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] with excutils.save_and_reraise_exception(): [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] self.force_reraise() [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise self.value [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] updated_port = self._update_port( [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] _ensure_no_port_binding_failure(port) [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 
31aa8536-1597-4b61-b069-80daf5306dd6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] raise exception.PortBindingFailed(port_id=port['id']) [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] nova.exception.PortBindingFailed: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. [ 605.513077] env[61898]: ERROR nova.compute.manager [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] [ 605.513371] env[61898]: DEBUG nova.compute.utils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 605.513401] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Build of instance 31aa8536-1597-4b61-b069-80daf5306dd6 was re-scheduled: Binding failed for port 4bb4e208-9c5a-4d86-974a-3c49a938ab12, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 605.513807] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 605.514042] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquiring lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.514188] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Acquired lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.514339] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.518777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.340s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.520364] env[61898]: INFO nova.compute.claims [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.523888] env[61898]: DEBUG oslo_vmware.api [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240393, 'name': PowerOnVM_Task, 'duration_secs': 0.472169} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.525298] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 605.525298] env[61898]: DEBUG nova.compute.manager [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 605.525813] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de631593-eca6-4a2b-a499-45565a410cff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.559981] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.034534] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.052421] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.104976] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.608255] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Releasing lock "refresh_cache-31aa8536-1597-4b61-b069-80daf5306dd6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.608255] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 606.609293] env[61898]: DEBUG nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 606.611200] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 606.645655] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.008133] env[61898]: ERROR nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. 
[ 607.008133] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 607.008133] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.008133] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.008133] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.008133] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.008133] env[61898]: ERROR nova.compute.manager raise self.value [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.008133] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 607.008133] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.008133] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 607.008514] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.008514] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 607.008514] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. 
[ 607.008514] env[61898]: ERROR nova.compute.manager [ 607.008514] env[61898]: Traceback (most recent call last): [ 607.008514] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 607.008514] env[61898]: listener.cb(fileno) [ 607.008514] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.008514] env[61898]: result = function(*args, **kwargs) [ 607.008514] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.008514] env[61898]: return func(*args, **kwargs) [ 607.008514] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 607.008514] env[61898]: raise e [ 607.008514] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 607.008514] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 607.008514] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.008514] env[61898]: created_port_ids = self._update_ports_for_instance( [ 607.008514] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.008514] env[61898]: with excutils.save_and_reraise_exception(): [ 607.008514] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.008514] env[61898]: self.force_reraise() [ 607.008514] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.008514] env[61898]: raise self.value [ 607.008514] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.008514] env[61898]: updated_port = self._update_port( [ 607.008514] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.008514] env[61898]: _ensure_no_port_binding_failure(port) [ 607.008514] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.008514] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 607.009147] env[61898]: nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. [ 607.009147] env[61898]: Removing descriptor: 19 [ 607.009147] env[61898]: ERROR nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. 
[ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Traceback (most recent call last): [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] yield resources [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.driver.spawn(context, instance, image_meta, [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.009147] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] vm_ref = self.build_virtual_machine(instance, [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] for vif in network_info: [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self._sync_wrapper(fn, *args, **kwargs) [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.wait() [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self[:] = self._gt.wait() [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self._exit_event.wait() [ 607.009713] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.010040] env[61898]: ERROR 
nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] result = hub.switch() [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self.greenlet.switch() [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] result = function(*args, **kwargs) [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return func(*args, **kwargs) [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise e [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] nwinfo = self.network_api.allocate_for_instance( [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 607.010040] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] created_port_ids = self._update_ports_for_instance( [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] with excutils.save_and_reraise_exception(): [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.force_reraise() [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise self.value [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] updated_port = self._update_port( [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.010327] 
env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] _ensure_no_port_binding_failure(port) [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 607.010327] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise exception.PortBindingFailed(port_id=port['id']) [ 607.010587] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. [ 607.010587] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] [ 607.010587] env[61898]: INFO nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Terminating instance [ 607.028157] env[61898]: DEBUG nova.compute.manager [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Received event network-changed-f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 607.029352] env[61898]: DEBUG nova.compute.manager [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Refreshing instance network info cache due to event network-changed-f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 607.029352] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] Acquiring lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.029352] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] Acquired lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.029352] env[61898]: DEBUG nova.network.neutron [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Refreshing network info cache for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.032678] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abd0cb7-08c8-4cbf-bb09-3fcada463e71 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.045026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce9ace3-2f80-45e4-945f-54d50e73a151 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.078045] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fc60d77d-e091-4238-900b-18eb9db347d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.087889] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccfcd0d-cbbd-44b5-bd1f-5e6e2858fa93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.104610] env[61898]: DEBUG nova.compute.provider_tree [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.148236] env[61898]: DEBUG nova.network.neutron [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.371567] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquiring lock "2887126b-6db5-4578-a063-552e774542cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.372549] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "2887126b-6db5-4578-a063-552e774542cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.518657] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.561800] env[61898]: DEBUG nova.network.neutron [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 607.608404] env[61898]: DEBUG nova.scheduler.client.report [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 607.651535] env[61898]: INFO nova.compute.manager [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] [instance: 31aa8536-1597-4b61-b069-80daf5306dd6] Took 1.04 seconds to deallocate network for instance. [ 607.763565] env[61898]: DEBUG nova.network.neutron [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.885551] env[61898]: INFO nova.compute.manager [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Rebuilding instance [ 607.939881] env[61898]: DEBUG nova.compute.manager [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 607.940934] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cece58f-1ef5-4b50-801b-fe0cb4a57df4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.113853] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 608.115257] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 608.117787] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.221s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.267776] env[61898]: DEBUG oslo_concurrency.lockutils [req-8c18e7cd-b736-4e77-8d4b-2a9b35554f2c req-e700dfec-66aa-486e-984d-40b0527b9377 service nova] Releasing lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.268268] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.268496] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.622286] env[61898]: DEBUG nova.compute.utils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 608.627227] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 608.627442] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 608.692015] env[61898]: INFO nova.scheduler.client.report [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Deleted allocations for instance 31aa8536-1597-4b61-b069-80daf5306dd6 [ 608.727192] env[61898]: DEBUG nova.policy [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba42f2b4883140d4bb94dfac9119e400', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a95a9d6ac5c3414db6d8891ee1ada25a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 608.795554] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.908822] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.950179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.950610] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.961763] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering off the VM {{(pid=61898) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 608.961763] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1a7fa68-a4df-4b79-9801-e0034e11aa03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.971592] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 608.971592] env[61898]: value = "task-1240394" [ 608.971592] env[61898]: _type = "Task" [ 608.971592] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.984450] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.991172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "29eadea9-fa85-4f51-97d0-a941e1658094" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.991172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.022602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "a0580308-d25b-47cb-9c1c-adb763be7925" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.022843] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.127735] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 609.193623] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b68bfa-31b1-434c-b648-6eb8a2dbb5ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.198919] env[61898]: DEBUG oslo_concurrency.lockutils [None req-67d9f161-86b0-4d89-80d2-31b33b1cc97c tempest-DeleteServersAdminTestJSON-793568726 tempest-DeleteServersAdminTestJSON-793568726-project-member] Lock "31aa8536-1597-4b61-b069-80daf5306dd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.081s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.204505] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee1308e-5a00-43f6-8d2e-dbfba8eaf6f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.215151] env[61898]: DEBUG nova.compute.manager [req-2d92259d-00ee-44df-9d31-2fe27df4b7af req-92f3f1e4-ac14-4568-a085-04dda31b456b service nova] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Received event network-vif-deleted-f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 609.245145] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8732c3b-7d40-4537-bd8e-d20bc452bf84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.255539] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9dee6f4-a4da-4e3c-b9ae-58e5bfa4f02f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.272387] env[61898]: DEBUG nova.compute.provider_tree [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.415305] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.415826] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 609.415958] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.416282] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b7de9b3-728f-44ba-b967-dc8744e81e57 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.427064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e937e81-2e3e-4b85-9875-502f7aaf0462 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.452291] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 69ad75e8-dcfc-499a-8f18-bf38575968be could not be found. [ 609.452531] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.452716] env[61898]: INFO nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Took 0.04 seconds to destroy the instance on the hypervisor. [ 609.452979] env[61898]: DEBUG oslo.service.loopingcall [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 609.453209] env[61898]: DEBUG nova.compute.manager [-] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 609.453303] env[61898]: DEBUG nova.network.neutron [-] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 609.479608] env[61898]: DEBUG nova.network.neutron [-] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.487865] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240394, 'name': PowerOffVM_Task, 'duration_secs': 0.11986} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.488831] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Successfully created port: 47837c36-a959-4078-8926-ab473335dca5 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.490902] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 609.491290] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.492740] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d9dca4-0a97-4dae-8c97-9688acce70d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.502520] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 609.502944] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1da34b63-769f-4720-b235-fc62bd9ef9b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.534985] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 609.534985] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 609.534985] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Deleting the datastore file [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 609.534985] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f83b184e-11c2-4ab3-b46d-3de63ceffce6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.543224] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a 
tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 609.543224] env[61898]: value = "task-1240396" [ 609.543224] env[61898]: _type = "Task" [ 609.543224] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.552495] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240396, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.704847] env[61898]: DEBUG nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 609.777552] env[61898]: DEBUG nova.scheduler.client.report [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 609.986978] env[61898]: DEBUG nova.network.neutron [-] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.051318] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111191} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.051633] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 610.051993] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 610.052312] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.138530] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 610.171732] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 610.172124] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 610.172499] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.172748] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 610.172846] env[61898]: DEBUG 
nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.173249] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 610.173378] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 610.173626] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 610.173871] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 610.174709] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 610.174709] env[61898]: DEBUG nova.virt.hardware [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 610.179136] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87c7473-7879-4a2e-889e-02ab2631beed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.189924] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1c524c-8c06-4ccf-84af-6c55b99fefe4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.238589] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.283593] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 
tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.165s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.284031] env[61898]: ERROR nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Traceback (most recent call last): [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.driver.spawn(context, instance, image_meta, [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] vm_ref = self.build_virtual_machine(instance, [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] vif_infos = vmwarevif.get_vif_info(self._session, [ 610.284031] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] for vif in network_info: [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return self._sync_wrapper(fn, *args, **kwargs) [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.wait() [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self[:] = self._gt.wait() [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return self._exit_event.wait() [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] current.throw(*self._exc) [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 610.284456] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] result = function(*args, **kwargs) [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] return func(*args, **kwargs) [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise e [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] nwinfo = self.network_api.allocate_for_instance( [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] created_port_ids = self._update_ports_for_instance( [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] with excutils.save_and_reraise_exception(): [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] self.force_reraise() [ 610.284784] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise self.value [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] updated_port = self._update_port( [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: 
ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] _ensure_no_port_binding_failure(port) [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] raise exception.PortBindingFailed(port_id=port['id']) [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] nova.exception.PortBindingFailed: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. [ 610.285188] env[61898]: ERROR nova.compute.manager [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] [ 610.285188] env[61898]: DEBUG nova.compute.utils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 610.285987] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.499s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.286175] env[61898]: DEBUG nova.objects.instance [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 610.290037] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Build of instance ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0 was re-scheduled: Binding failed for port 3e48fdd5-5b31-4b00-adbd-84783f49950c, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 610.290565] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 610.290835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquiring lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 610.291023] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Acquired lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.291247] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 610.489914] env[61898]: INFO nova.compute.manager [-] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Took 1.04 seconds to deallocate network for instance. [ 610.492371] env[61898]: DEBUG nova.compute.claims [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 610.492547] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.823771] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.924851] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.071358] env[61898]: ERROR nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. [ 611.071358] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 611.071358] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.071358] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.071358] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.071358] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.071358] env[61898]: ERROR nova.compute.manager raise self.value [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.071358] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 611.071358] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.071358] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 611.071747] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.071747] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 611.071747] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. 
[ 611.071747] env[61898]: ERROR nova.compute.manager [ 611.071747] env[61898]: Traceback (most recent call last): [ 611.071747] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 611.071747] env[61898]: listener.cb(fileno) [ 611.071747] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.071747] env[61898]: result = function(*args, **kwargs) [ 611.071747] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 611.071747] env[61898]: return func(*args, **kwargs) [ 611.071747] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 611.071747] env[61898]: raise e [ 611.071747] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 611.071747] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 611.071747] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.071747] env[61898]: created_port_ids = self._update_ports_for_instance( [ 611.071747] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.071747] env[61898]: with excutils.save_and_reraise_exception(): [ 611.071747] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.071747] env[61898]: self.force_reraise() [ 611.071747] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.071747] env[61898]: raise self.value [ 611.071747] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.071747] env[61898]: updated_port = self._update_port( [ 611.071747] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.071747] env[61898]: _ensure_no_port_binding_failure(port) [ 611.071747] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.071747] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 611.072408] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. [ 611.072408] env[61898]: Removing descriptor: 19 [ 611.072566] env[61898]: ERROR nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. 
[ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] Traceback (most recent call last): [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] yield resources [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.driver.spawn(context, instance, image_meta, [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] vm_ref = self.build_virtual_machine(instance, [ 611.072566] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] vif_infos = vmwarevif.get_vif_info(self._session, [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] for vif in network_info: [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self._sync_wrapper(fn, *args, **kwargs) [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.wait() [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self[:] = self._gt.wait() [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self._exit_event.wait() [ 611.072822] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 611.072822] env[61898]: ERROR 
nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] result = hub.switch() [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self.greenlet.switch() [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] result = function(*args, **kwargs) [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return func(*args, **kwargs) [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise e [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] nwinfo = self.network_api.allocate_for_instance( [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] created_port_ids = self._update_ports_for_instance( [ 611.073138] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] with excutils.save_and_reraise_exception(): [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.force_reraise() [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise self.value [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] updated_port = self._update_port( [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 611.073471] 
env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] _ensure_no_port_binding_failure(port) [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise exception.PortBindingFailed(port_id=port['id']) [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. [ 611.073471] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] [ 611.073892] env[61898]: INFO nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Terminating instance [ 611.098427] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 611.098711] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 611.098926] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.099190] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 611.099535] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.099735] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 611.100484] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 611.100484] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 611.100484] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 611.100694] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 611.100841] env[61898]: DEBUG nova.virt.hardware [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.101991] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbad9c2e-ea30-4125-94f7-3e12e0527e52 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.111119] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f67326c-2763-410d-9948-a2397acf571c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.127018] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.132624] env[61898]: DEBUG oslo.service.loopingcall [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.132845] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 611.133060] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cee44568-5453-477e-ba48-a53316746c01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.151411] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.151411] env[61898]: value = "task-1240397" [ 611.151411] env[61898]: _type = "Task" [ 611.151411] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.159671] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240397, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.296383] env[61898]: DEBUG nova.compute.manager [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Received event network-changed-47837c36-a959-4078-8926-ab473335dca5 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 611.297550] env[61898]: DEBUG nova.compute.manager [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Refreshing instance network info cache due to event network-changed-47837c36-a959-4078-8926-ab473335dca5. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 611.297550] env[61898]: DEBUG oslo_concurrency.lockutils [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] Acquiring lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.297550] env[61898]: DEBUG oslo_concurrency.lockutils [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] Acquired lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.297550] env[61898]: DEBUG nova.network.neutron [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Refreshing network info cache for port 47837c36-a959-4078-8926-ab473335dca5 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.302234] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e3fdbf11-4cc2-48d9-99e4-59602f50b698 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.302757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.511s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.430033] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Releasing lock "refresh_cache-ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.430033] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 611.430033] env[61898]: DEBUG nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 611.430033] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 611.457815] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.581419] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.668307] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240397, 'name': CreateVM_Task, 'duration_secs': 0.313626} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.668671] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 611.669328] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.669651] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.670176] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 611.670577] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-750c0226-9529-4eb4-a476-7c6d34d116f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.677728] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 611.677728] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c255b3-352f-6e2e-4748-9afbbcd81532" [ 611.677728] env[61898]: _type = "Task" [ 611.677728] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.693156] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c255b3-352f-6e2e-4748-9afbbcd81532, 'name': SearchDatastore_Task, 'duration_secs': 0.010633} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.693619] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.693984] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.694361] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.694608] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.694982] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 611.696321] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-277ce28c-6269-491d-820d-ea9ce779839f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.707531] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 611.708029] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 611.709049] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-179d0489-e6dc-4373-8a06-e6952832b281 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.717284] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 611.717284] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526a5e78-8057-7976-e3fb-42f344fb91ae" [ 611.717284] env[61898]: _type = "Task" [ 611.717284] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.732082] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]526a5e78-8057-7976-e3fb-42f344fb91ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009277} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.733148] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-420c7a4c-57a3-4f46-80a7-4b096339e9d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.742079] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 611.742079] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521cd234-1bee-1650-fef9-cd43cbb4727c" [ 611.742079] env[61898]: _type = "Task" [ 611.742079] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.753015] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521cd234-1bee-1650-fef9-cd43cbb4727c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.848742] env[61898]: DEBUG nova.network.neutron [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.960102] env[61898]: DEBUG nova.network.neutron [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.102836] env[61898]: DEBUG nova.network.neutron [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.258214] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521cd234-1bee-1650-fef9-cd43cbb4727c, 'name': SearchDatastore_Task, 'duration_secs': 0.010327} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.258447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.259296] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 612.259296] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4061bb76-f5d9-4f8e-aa4c-063b725180c0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.268790] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 612.268790] env[61898]: value = "task-1240398" [ 612.268790] env[61898]: _type = "Task" [ 612.268790] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.282784] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240398, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.359236] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7031c58-17bb-44e2-a76c-46a25cfc105c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.369302] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12848f6-ea60-4df7-86b8-c558a42f69fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.406457] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13053802-b49c-4cd6-8e78-1ea6cf6d1768 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.415901] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b22cac4-442d-4c07-892e-3c3915e47774 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.431604] env[61898]: DEBUG nova.compute.provider_tree [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.463020] env[61898]: INFO nova.compute.manager [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] [instance: ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0] Took 1.03 seconds to deallocate network for instance. 
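The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task lines above all follow the same wait_for_task pattern: submit the vCenter task, then poll it until it reports success or error, logging the progress percentage along the way. A rough standalone sketch of that control flow, using a hypothetical poll_task_state() callable in place of the PropertyCollector query oslo.vmware actually performs:

import time

class VMwareTaskFailed(Exception):
    pass

def wait_for_task(poll_task_state, interval=0.5, timeout=300):
    """Poll a vCenter task until it reaches a terminal state.

    poll_task_state() is assumed to return (state, progress, error),
    mirroring the TaskInfo fields oslo.vmware reads; 'success' and
    'error' are terminal, anything else means keep polling.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = poll_task_state()
        if state == 'success':
            return
        if state == 'error':
            raise VMwareTaskFailed(error or 'task failed')
        print("progress is %s%%" % progress)  # matches the log lines above
        time.sleep(interval)
    raise TimeoutError('task did not complete in time')

if __name__ == '__main__':
    # Fake task that finishes on the third poll.
    states = iter([('running', 0, None), ('running', 50, None),
                   ('success', 100, None)])
    wait_for_task(lambda: next(states), interval=0.01)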
[ 612.607951] env[61898]: DEBUG oslo_concurrency.lockutils [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] Releasing lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.608246] env[61898]: DEBUG nova.compute.manager [req-5c6eeb0c-cf1a-4a8c-829c-52fb06c207ff req-71035409-d749-475e-ab80-345eb16105dd service nova] [instance: 759d1958-0518-4654-8686-38be0920c29f] Received event network-vif-deleted-47837c36-a959-4078-8926-ab473335dca5 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 612.608601] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.608766] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.792983] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240398, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.936140] env[61898]: DEBUG nova.scheduler.client.report [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 613.166234] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.283830] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240398, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881855} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.284339] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 613.284658] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 613.285135] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c7015cc-c923-4bd5-90db-de8c82a8ebaa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.293332] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 613.293332] env[61898]: value = "task-1240399" [ 613.293332] env[61898]: _type = "Task" [ 613.293332] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.305159] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240399, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.425636] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.443983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.141s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.443983] env[61898]: ERROR nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. 
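The "inventory data" dict logged just above is what the resource tracker reports to Placement for this provider. The sketch below only illustrates how those fields combine into schedulable capacity, i.e. (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations; the helper is illustrative, not Nova or Placement code.

# Inventory copied from the log above; the capacity calculation follows the
# usual Placement interpretation of these fields and is for illustration only.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Schedulable amount per resource class: (total - reserved) * ratio."""
    return {rc: (f['total'] - f['reserved']) * f['allocation_ratio']
            for rc, f in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}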
[ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] Traceback (most recent call last): [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.driver.spawn(context, instance, image_meta, [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self._vmops.spawn(context, instance, image_meta, injected_files, [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 613.443983] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] vm_ref = self.build_virtual_machine(instance, [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] vif_infos = vmwarevif.get_vif_info(self._session, [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] for vif in network_info: [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return self._sync_wrapper(fn, *args, **kwargs) [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.wait() [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self[:] = self._gt.wait() [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return self._exit_event.wait() [ 613.444243] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] current.throw(*self._exc) [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] result = function(*args, **kwargs) [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] return func(*args, **kwargs) [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise e [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] nwinfo = self.network_api.allocate_for_instance( [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] created_port_ids = self._update_ports_for_instance( [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 613.444544] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] with excutils.save_and_reraise_exception(): [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] self.force_reraise() [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise self.value [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] updated_port = self._update_port( [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] _ensure_no_port_binding_failure(port) [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] raise exception.PortBindingFailed(port_id=port['id']) [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] nova.exception.PortBindingFailed: Binding failed for 
port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. [ 613.444872] env[61898]: ERROR nova.compute.manager [instance: d74776d1-f374-4761-976c-f073b3821f42] [ 613.445171] env[61898]: DEBUG nova.compute.utils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 613.447444] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.173s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.448909] env[61898]: INFO nova.compute.claims [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.452578] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Build of instance d74776d1-f374-4761-976c-f073b3821f42 was re-scheduled: Binding failed for port f1a03b78-20f3-4440-a998-e94de3baca8a, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 613.453069] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 613.453535] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.453737] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.453932] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 613.521012] env[61898]: INFO nova.scheduler.client.report [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Deleted allocations for instance ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0 [ 613.805765] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072606} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.806200] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 613.806829] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0590fa-fae4-4855-ac41-0fa5e1c7e46c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.828924] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.828924] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5d9a58-29ea-439a-82fc-583e7ef5d739 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.849418] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 613.849418] env[61898]: value = "task-1240400" [ 613.849418] env[61898]: _type = "Task" [ 613.849418] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.858334] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240400, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.932554] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Releasing lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.933649] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 613.935287] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.935287] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b54eb27-1ba2-4015-b2da-7755ae96415b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.948086] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f94a379-5b13-4791-9638-b1b44f99eab4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.982405] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 759d1958-0518-4654-8686-38be0920c29f could not be found. [ 613.982405] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.982405] env[61898]: INFO nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 613.982581] env[61898]: DEBUG oslo.service.loopingcall [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.982816] env[61898]: DEBUG nova.compute.manager [-] [instance: 759d1958-0518-4654-8686-38be0920c29f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 613.982901] env[61898]: DEBUG nova.network.neutron [-] [instance: 759d1958-0518-4654-8686-38be0920c29f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.996300] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.010653] env[61898]: DEBUG nova.network.neutron [-] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.029829] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ac0b5eb-b251-43af-b514-e775d1b2da80 tempest-ServersAdminNegativeTestJSON-819255188 tempest-ServersAdminNegativeTestJSON-819255188-project-member] Lock "ada4c1f9-ee1e-421b-9d97-fc3fddcf0ce0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.879s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.166317] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.365082] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240400, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.513084] env[61898]: DEBUG nova.network.neutron [-] [instance: 759d1958-0518-4654-8686-38be0920c29f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.533836] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 614.673383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-d74776d1-f374-4761-976c-f073b3821f42" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.673620] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 614.673795] env[61898]: DEBUG nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 614.673962] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.692974] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.861337] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240400, 'name': ReconfigVM_Task, 'duration_secs': 0.598681} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.864450] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9/7ef91986-fb46-478b-85a5-05d597790ad9.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.865477] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60ce4968-bc2b-4fbe-bd08-7379102c56b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.875435] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 614.875435] env[61898]: value = "task-1240401" [ 614.875435] env[61898]: _type = "Task" [ 614.875435] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.886213] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240401, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.000306] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cedbd29-4bef-4e4b-b5e4-cb6566613c6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.009424] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781afda4-e1c6-47c6-8370-4be4ffdc5f76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.016330] env[61898]: INFO nova.compute.manager [-] [instance: 759d1958-0518-4654-8686-38be0920c29f] Took 1.03 seconds to deallocate network for instance. [ 615.044981] env[61898]: DEBUG nova.compute.claims [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.045920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.045920] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a0fde8-0fa5-4ff3-bbce-b7398341ff34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.058092] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8eb48d-8ed1-474b-aafa-9cf5aec71718 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.072506] env[61898]: DEBUG nova.compute.provider_tree [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.074414] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.195857] env[61898]: DEBUG nova.network.neutron [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.390526] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240401, 'name': Rename_Task, 
'duration_secs': 0.149047} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.390526] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 615.390526] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02765f98-dfef-4167-b9f7-21d909225979 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.399603] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Waiting for the task: (returnval){ [ 615.399603] env[61898]: value = "task-1240402" [ 615.399603] env[61898]: _type = "Task" [ 615.399603] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.410073] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.576082] env[61898]: DEBUG nova.scheduler.client.report [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 615.702225] env[61898]: INFO nova.compute.manager [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: d74776d1-f374-4761-976c-f073b3821f42] Took 1.03 seconds to deallocate network for instance. [ 615.914528] env[61898]: DEBUG oslo_vmware.api [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Task: {'id': task-1240402, 'name': PowerOnVM_Task, 'duration_secs': 0.42906} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.914844] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.917017] env[61898]: DEBUG nova.compute.manager [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 615.917017] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfad8d46-0f48-49b8-8b36-6e3da5665a7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.083527] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.635s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.084144] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 616.088096] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.249s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.090286] env[61898]: INFO nova.compute.claims [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.439998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.599063] env[61898]: DEBUG nova.compute.utils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.603045] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 616.603045] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 616.659716] env[61898]: DEBUG nova.policy [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6142b6dd97fa48199f3edeb8ceef8d70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b549768af40c4edbb845f0e1f27ab52c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.740948] env[61898]: INFO nova.scheduler.client.report [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Deleted allocations for instance d74776d1-f374-4761-976c-f073b3821f42 [ 616.782955] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.783145] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.114128] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 617.131911] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Successfully created port: 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 617.254498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e82b011f-38ce-4c47-81d2-ddfd62a7866c tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "d74776d1-f374-4761-976c-f073b3821f42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.411s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.622066] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef4d0ec-f510-468f-8880-f139cc607d0d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.631198] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bec3f3-5528-457e-9b6e-b185bf6ba14e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.666111] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "7ef91986-fb46-478b-85a5-05d597790ad9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.666394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "7ef91986-fb46-478b-85a5-05d597790ad9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.666596] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "7ef91986-fb46-478b-85a5-05d597790ad9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.666775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "7ef91986-fb46-478b-85a5-05d597790ad9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.666942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock 
"7ef91986-fb46-478b-85a5-05d597790ad9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.669413] env[61898]: INFO nova.compute.manager [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Terminating instance [ 617.673012] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d336ced-a3b5-4def-a0d5-317df7c30122 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.683574] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab6cd8b-cd5a-442d-8d60-2974c508ed65 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.704085] env[61898]: DEBUG nova.compute.provider_tree [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.739364] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "4c744673-0d9b-44ef-938f-372b101a2053" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.739650] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.757210] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 618.121796] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 618.154252] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.154485] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.154634] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.154815] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.154953] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.155121] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.155465] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.155701] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.155880] 
env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.156052] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.156225] env[61898]: DEBUG nova.virt.hardware [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.157107] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6732dc-b4da-4875-aeb5-2a823847078b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.166622] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c8f44a1-4f2c-4985-b131-bb927d437564 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.181584] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "refresh_cache-7ef91986-fb46-478b-85a5-05d597790ad9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.181746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquired lock "refresh_cache-7ef91986-fb46-478b-85a5-05d597790ad9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.181914] env[61898]: DEBUG nova.network.neutron [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.205156] env[61898]: DEBUG nova.scheduler.client.report [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 618.292681] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 
tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.527098] env[61898]: DEBUG nova.compute.manager [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Received event network-changed-950f9b51-3d4f-4df9-98a0-aa4e423ec4ac {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 618.527098] env[61898]: DEBUG nova.compute.manager [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Refreshing instance network info cache due to event network-changed-950f9b51-3d4f-4df9-98a0-aa4e423ec4ac. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 618.527098] env[61898]: DEBUG oslo_concurrency.lockutils [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] Acquiring lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.527098] env[61898]: DEBUG oslo_concurrency.lockutils [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] Acquired lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.527098] env[61898]: DEBUG nova.network.neutron [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Refreshing network info cache for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.702284] env[61898]: DEBUG nova.network.neutron [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.703852] env[61898]: ERROR nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. 
[ 618.703852] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 618.703852] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.703852] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.703852] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.703852] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.703852] env[61898]: ERROR nova.compute.manager raise self.value [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.703852] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.703852] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.703852] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.704328] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.704328] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.704328] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. 
[ 618.704328] env[61898]: ERROR nova.compute.manager [ 618.705068] env[61898]: Traceback (most recent call last): [ 618.705068] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.705068] env[61898]: listener.cb(fileno) [ 618.705068] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.705068] env[61898]: result = function(*args, **kwargs) [ 618.705068] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.705068] env[61898]: return func(*args, **kwargs) [ 618.705068] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 618.705068] env[61898]: raise e [ 618.705068] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 618.705068] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 618.705068] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.705068] env[61898]: created_port_ids = self._update_ports_for_instance( [ 618.705068] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.705068] env[61898]: with excutils.save_and_reraise_exception(): [ 618.705068] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.705068] env[61898]: self.force_reraise() [ 618.705068] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.705068] env[61898]: raise self.value [ 618.705068] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.705068] env[61898]: updated_port = self._update_port( [ 618.705068] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.705068] env[61898]: _ensure_no_port_binding_failure(port) [ 618.705068] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.705068] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.705068] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. [ 618.705068] env[61898]: Removing descriptor: 19 [ 618.706744] env[61898]: ERROR nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. 
[ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Traceback (most recent call last): [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] yield resources [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.driver.spawn(context, instance, image_meta, [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] vm_ref = self.build_virtual_machine(instance, [ 618.706744] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] for vif in network_info: [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self._sync_wrapper(fn, *args, **kwargs) [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.wait() [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self[:] = self._gt.wait() [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self._exit_event.wait() [ 618.707030] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.707030] env[61898]: ERROR 
nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] result = hub.switch() [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self.greenlet.switch() [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] result = function(*args, **kwargs) [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return func(*args, **kwargs) [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise e [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] nwinfo = self.network_api.allocate_for_instance( [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] created_port_ids = self._update_ports_for_instance( [ 618.707403] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] with excutils.save_and_reraise_exception(): [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.force_reraise() [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise self.value [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] updated_port = self._update_port( [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.707765] 
env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] _ensure_no_port_binding_failure(port) [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise exception.PortBindingFailed(port_id=port['id']) [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. [ 618.707765] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] [ 618.708153] env[61898]: INFO nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Terminating instance [ 618.710976] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.710976] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 618.716611] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.864s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.716611] env[61898]: INFO nova.compute.claims [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.777999] env[61898]: DEBUG nova.network.neutron [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.045417] env[61898]: DEBUG nova.network.neutron [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.117573] env[61898]: DEBUG nova.network.neutron [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.227956] env[61898]: DEBUG nova.compute.utils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.229670] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.229976] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 619.231034] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.283281] env[61898]: DEBUG nova.policy [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf524551a5384bdda93929cd6aba4297', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03484f393357415f9a5244df3e2721ab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 619.285125] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Releasing lock "refresh_cache-7ef91986-fb46-478b-85a5-05d597790ad9" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.285596] env[61898]: DEBUG nova.compute.manager [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 619.285837] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.286833] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d0d10d-0c8b-4dc3-b5c9-a1bf10194841 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.296444] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 619.296693] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa02ad4c-6f8f-4f33-b0b1-6048aeb7a37f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.303665] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 619.303665] env[61898]: value = "task-1240403" [ 619.303665] env[61898]: _type = "Task" [ 619.303665] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.314381] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240403, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.549555] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Successfully created port: 7371fb07-a597-4fd5-8d37-e0669fc56203 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.621786] env[61898]: DEBUG oslo_concurrency.lockutils [req-f1427c2a-3534-4bbc-9093-64d43bd6b052 req-f4ca2cc7-2488-4b85-933a-87d40950a18f service nova] Releasing lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.622213] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.622401] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 619.733689] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 619.814109] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240403, 'name': PowerOffVM_Task, 'duration_secs': 0.128346} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.814671] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.814839] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.815102] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68a023aa-d87b-42bb-9d0a-64b92f8d35ed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.850176] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.850176] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.850176] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleting the datastore file [datastore2] 7ef91986-fb46-478b-85a5-05d597790ad9 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.850176] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5dac5015-b181-4cb9-9e53-ce30e131f7a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.863358] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for the task: (returnval){ [ 619.863358] env[61898]: value = "task-1240405" [ 619.863358] env[61898]: _type = "Task" [ 619.863358] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.874818] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.140042] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.224801] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6b261b-f901-4716-975b-69ad78bb3201 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.233221] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc51ff85-0945-4cf1-bb16-55343e41d07c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.271920] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.273912] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d620aaa-fb74-4b8c-aecc-de29c9d695ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.284181] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46de599a-e344-4289-9a83-b025059e66b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.300015] env[61898]: DEBUG nova.compute.provider_tree [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.304516] env[61898]: DEBUG nova.compute.manager [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Received event network-changed-7371fb07-a597-4fd5-8d37-e0669fc56203 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 620.304516] env[61898]: DEBUG nova.compute.manager [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Refreshing instance network info cache due to event network-changed-7371fb07-a597-4fd5-8d37-e0669fc56203. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 620.304516] env[61898]: DEBUG oslo_concurrency.lockutils [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] Acquiring lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.304516] env[61898]: DEBUG oslo_concurrency.lockutils [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] Acquired lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.304516] env[61898]: DEBUG nova.network.neutron [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Refreshing network info cache for port 7371fb07-a597-4fd5-8d37-e0669fc56203 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 620.371877] env[61898]: DEBUG oslo_vmware.api [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Task: {'id': task-1240405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101555} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.372149] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.372325] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.372492] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.374019] env[61898]: INFO nova.compute.manager [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Took 1.09 seconds to destroy the instance on the hypervisor. [ 620.374019] env[61898]: DEBUG oslo.service.loopingcall [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.374019] env[61898]: DEBUG nova.compute.manager [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 620.374019] env[61898]: DEBUG nova.network.neutron [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.400623] env[61898]: DEBUG nova.network.neutron [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.457754] env[61898]: ERROR nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. [ 620.457754] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.457754] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.457754] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.457754] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.457754] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.457754] env[61898]: ERROR nova.compute.manager raise self.value [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.457754] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 620.457754] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.457754] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 620.458233] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.458233] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 620.458233] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. 
[ 620.458233] env[61898]: ERROR nova.compute.manager [ 620.458233] env[61898]: Traceback (most recent call last): [ 620.458233] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 620.458233] env[61898]: listener.cb(fileno) [ 620.458233] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.458233] env[61898]: result = function(*args, **kwargs) [ 620.458233] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 620.458233] env[61898]: return func(*args, **kwargs) [ 620.458233] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 620.458233] env[61898]: raise e [ 620.458233] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.458233] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 620.458233] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.458233] env[61898]: created_port_ids = self._update_ports_for_instance( [ 620.458233] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.458233] env[61898]: with excutils.save_and_reraise_exception(): [ 620.458233] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.458233] env[61898]: self.force_reraise() [ 620.458233] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.458233] env[61898]: raise self.value [ 620.458233] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.458233] env[61898]: updated_port = self._update_port( [ 620.458233] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.458233] env[61898]: _ensure_no_port_binding_failure(port) [ 620.458233] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 620.458233] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 620.458957] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. [ 620.458957] env[61898]: Removing descriptor: 20 [ 620.547238] env[61898]: DEBUG nova.compute.manager [req-941a3afc-dc6a-4804-9638-5ac82f59e9c7 req-bb090ab8-0acd-4d8e-b13b-a037370460f6 service nova] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Received event network-vif-deleted-950f9b51-3d4f-4df9-98a0-aa4e423ec4ac {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 620.742743] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 620.768828] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.769080] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.769247] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 620.769984] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 620.769984] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 620.769984] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 620.769984] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 620.770187] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 620.770272] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 620.770419] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 620.770583] env[61898]: DEBUG nova.virt.hardware [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.771525] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3582eff7-91e4-4469-841c-b3b80e9b191d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.779070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.779442] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 620.779627] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.779937] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adccb74e-2285-4a20-a5a8-78b4c8c9591e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.782616] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f009f0e-7cc7-46e6-a021-98fbbe1af8da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.797281] env[61898]: ERROR nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. 
[ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Traceback (most recent call last): [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] yield resources [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.driver.spawn(context, instance, image_meta, [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] vm_ref = self.build_virtual_machine(instance, [ 620.797281] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] for vif in network_info: [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return self._sync_wrapper(fn, *args, **kwargs) [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.wait() [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self[:] = self._gt.wait() [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return self._exit_event.wait() [ 620.797644] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 620.797644] env[61898]: ERROR 
nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] current.throw(*self._exc) [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] result = function(*args, **kwargs) [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return func(*args, **kwargs) [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise e [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] nwinfo = self.network_api.allocate_for_instance( [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] created_port_ids = self._update_ports_for_instance( [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] with excutils.save_and_reraise_exception(): [ 620.798031] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.force_reraise() [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise self.value [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] updated_port = self._update_port( [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] _ensure_no_port_binding_failure(port) [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise exception.PortBindingFailed(port_id=port['id']) [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. [ 620.798390] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] [ 620.798390] env[61898]: INFO nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Terminating instance [ 620.803289] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18160b6a-4774-4a16-a661-2dc577bce479 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.814013] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquiring lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.814947] env[61898]: DEBUG nova.scheduler.client.report [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 620.834065] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cfb2f64b-7026-444d-8f86-500445343ac1 could not be found. [ 620.834304] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.834483] env[61898]: INFO nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Took 0.05 seconds to destroy the instance on the hypervisor. 
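Editor's note on the repeated failures above: every PortBindingFailed traceback in this log (port 7371fb07-a597-4fd5-8d37-e0669fc56203 here, and port 4ee645ae-c20a-4320-bbd1-8806bfe20177 further down) bottoms out in _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py:294, which raises exception.PortBindingFailed(port_id=port['id']) once Neutron reports the bind as failed. The following is a minimal reading-aid sketch of that guard, not the Nova source: the 'binding:vif_type' key and the 'binding_failed' sentinel it checks are assumptions based on the standard Neutron port-binding extension; only the exception shape and message mirror the log output directly.

    # Minimal sketch of the guard the tracebacks above keep hitting.
    # NOT the actual Nova implementation; the 'binding:vif_type' key and the
    # 'binding_failed' sentinel are assumptions -- only the exception and its
    # message follow the log lines directly.

    class PortBindingFailed(Exception):
        def __init__(self, port_id: str):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise PortBindingFailed if Neutron marked this port's binding as failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == "__main__":
        # A port whose binding failed, roughly as Neutron would return it after update.
        port = {'id': '7371fb07-a597-4fd5-8d37-e0669fc56203',
                'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(exc)  # matches the "Binding failed for port ..." lines in this log

Because nova-compute only records the binding result, the actual cause of the failed bind has to be looked up in the neutron-server logs for these port IDs, exactly as the error message suggests.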
[ 620.834718] env[61898]: DEBUG oslo.service.loopingcall [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.835599] env[61898]: DEBUG nova.network.neutron [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.837195] env[61898]: DEBUG nova.compute.manager [-] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 620.837295] env[61898]: DEBUG nova.network.neutron [-] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.870253] env[61898]: DEBUG nova.network.neutron [-] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.903549] env[61898]: DEBUG nova.network.neutron [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.914630] env[61898]: DEBUG nova.network.neutron [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.075903] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.076143] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.320614] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.321250] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 621.324044] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.260s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.324278] env[61898]: DEBUG nova.objects.instance [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lazy-loading 'resources' on Instance uuid 0dfabd80-a385-4124-af33-083559819d7a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 621.375905] env[61898]: DEBUG nova.network.neutron [-] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.406428] env[61898]: INFO nova.compute.manager [-] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Took 1.03 seconds to deallocate network for instance. [ 621.416961] env[61898]: DEBUG oslo_concurrency.lockutils [req-2ac0df2c-a82d-4a97-a4f5-55080e31ac4b req-a8ec7bcf-c4db-49d8-94fe-21790574eb3f service nova] Releasing lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.417360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquired lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.417547] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.582857] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 621.584030] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 621.584030] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 621.831513] env[61898]: DEBUG nova.compute.utils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.833038] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 
tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 621.833038] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.872880] env[61898]: DEBUG nova.policy [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79e4769fd1c94853b67052cd90107f80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7346be30885241d2aeacc56899ee8f75', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 621.879046] env[61898]: INFO nova.compute.manager [-] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Took 1.04 seconds to deallocate network for instance. [ 621.881652] env[61898]: DEBUG nova.compute.claims [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.881932] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.915914] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.946061] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.059071] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.086969] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.090177] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.090177] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 759d1958-0518-4654-8686-38be0920c29f] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.090177] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.090177] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.090177] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Skipping network cache update for instance because it is Building. 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 622.108217] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.108416] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.108596] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 622.108776] env[61898]: DEBUG nova.objects.instance [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lazy-loading 'info_cache' on Instance uuid 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 622.176250] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Successfully created port: 4ee645ae-c20a-4320-bbd1-8806bfe20177 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.265950] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79459de-eae8-4b39-a955-2bccf802b489 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.274065] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f17318d-b494-4cda-8e5c-bfa0e299463b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.304087] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5be1ce-e78a-4327-8c0d-ac1d1e25142c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.312220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74830856-2841-460f-a743-1e0e62ac9836 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.327726] env[61898]: DEBUG nova.compute.provider_tree [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.331299] env[61898]: DEBUG nova.compute.manager [req-25f3377b-ff6a-459c-b8a1-a24885642c80 req-380606b9-58ac-4d44-8f61-06c7f78937eb service nova] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Received event network-vif-deleted-7371fb07-a597-4fd5-8d37-e0669fc56203 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 622.336877] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 
tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 622.563604] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Releasing lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.564408] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 622.567079] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.567079] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7d1f445-158f-4281-9364-17c343540716 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.575919] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f9869c-e8c4-4777-b43f-b1aa81921a94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.601145] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f968f3df-c70b-466b-8aaa-879354f12d3b could not be found. [ 622.601145] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 622.601145] env[61898]: INFO nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 622.601145] env[61898]: DEBUG oslo.service.loopingcall [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.601145] env[61898]: DEBUG nova.compute.manager [-] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 622.601145] env[61898]: DEBUG nova.network.neutron [-] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 622.618568] env[61898]: DEBUG nova.network.neutron [-] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.830749] env[61898]: DEBUG nova.scheduler.client.report [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 623.018445] env[61898]: ERROR nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. 
[ 623.018445] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 623.018445] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.018445] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.018445] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.018445] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.018445] env[61898]: ERROR nova.compute.manager raise self.value [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.018445] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 623.018445] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.018445] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 623.019283] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.019283] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 623.019283] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. 
[ 623.019283] env[61898]: ERROR nova.compute.manager [ 623.019283] env[61898]: Traceback (most recent call last): [ 623.019283] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 623.019283] env[61898]: listener.cb(fileno) [ 623.019283] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.019283] env[61898]: result = function(*args, **kwargs) [ 623.019283] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.019283] env[61898]: return func(*args, **kwargs) [ 623.019283] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 623.019283] env[61898]: raise e [ 623.019283] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 623.019283] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 623.019283] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.019283] env[61898]: created_port_ids = self._update_ports_for_instance( [ 623.019283] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.019283] env[61898]: with excutils.save_and_reraise_exception(): [ 623.019283] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.019283] env[61898]: self.force_reraise() [ 623.019283] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.019283] env[61898]: raise self.value [ 623.019283] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.019283] env[61898]: updated_port = self._update_port( [ 623.019283] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.019283] env[61898]: _ensure_no_port_binding_failure(port) [ 623.019283] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 623.019283] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 623.020178] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. [ 623.020178] env[61898]: Removing descriptor: 20 [ 623.119347] env[61898]: DEBUG nova.network.neutron [-] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.134874] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.336778] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.340056] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.218s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.345203] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 623.358750] env[61898]: INFO nova.scheduler.client.report [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleted allocations for instance 0dfabd80-a385-4124-af33-083559819d7a [ 623.370987] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.371257] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.371414] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.371625] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 623.371995] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.371995] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.372151] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.372324] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.372489] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.372676] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.372849] env[61898]: DEBUG nova.virt.hardware [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.374074] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a567da-e62b-4b8d-8e11-42329ff32f17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.382412] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7451fd1-f3f5-4702-acef-3920ffcad7f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.397688] env[61898]: ERROR nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. 
[ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Traceback (most recent call last): [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] yield resources [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.driver.spawn(context, instance, image_meta, [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] vm_ref = self.build_virtual_machine(instance, [ 623.397688] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] for vif in network_info: [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return self._sync_wrapper(fn, *args, **kwargs) [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.wait() [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self[:] = self._gt.wait() [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return self._exit_event.wait() [ 623.398128] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 623.398128] env[61898]: ERROR 
nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] current.throw(*self._exc) [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] result = function(*args, **kwargs) [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return func(*args, **kwargs) [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise e [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] nwinfo = self.network_api.allocate_for_instance( [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] created_port_ids = self._update_ports_for_instance( [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] with excutils.save_and_reraise_exception(): [ 623.398476] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.force_reraise() [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise self.value [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] updated_port = self._update_port( [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] _ensure_no_port_binding_failure(port) [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise exception.PortBindingFailed(port_id=port['id']) [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. [ 623.398820] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] [ 623.398820] env[61898]: INFO nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Terminating instance [ 623.622327] env[61898]: INFO nova.compute.manager [-] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Took 1.02 seconds to deallocate network for instance. [ 623.627976] env[61898]: DEBUG nova.compute.claims [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 623.628069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.686040] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.869131] env[61898]: DEBUG oslo_concurrency.lockutils [None req-164edbc7-c8fe-4357-8516-0abe36556e5a tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "0dfabd80-a385-4124-af33-083559819d7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.209s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.905597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquiring lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.905777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquired lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.905950] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Building network info cache for instance {{(pid=61898) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.183140] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d3dab5-8cb9-468a-91d0-bee13625698b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.190659] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5814def6-87c7-4780-82a5-e8d1d90d209e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.193759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.193944] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 624.194154] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.194640] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.194800] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.194968] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.195135] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.195281] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.195406] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 624.195547] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.221115] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c993fd02-2bad-4af9-add3-238b1d31452f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.228929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cb95d8-10e1-44cb-9a5a-eb48a7ebf76c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.243311] env[61898]: DEBUG nova.compute.provider_tree [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.354260] env[61898]: DEBUG nova.compute.manager [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Received event network-changed-4ee645ae-c20a-4320-bbd1-8806bfe20177 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 624.354387] env[61898]: DEBUG nova.compute.manager [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Refreshing instance network info cache due to event network-changed-4ee645ae-c20a-4320-bbd1-8806bfe20177. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 624.354580] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] Acquiring lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.425037] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.495309] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.698715] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.745901] env[61898]: DEBUG nova.scheduler.client.report [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 624.998944] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Releasing lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.998944] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 624.999152] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.999317] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] Acquired lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.999489] env[61898]: DEBUG nova.network.neutron [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Refreshing network info cache for port 4ee645ae-c20a-4320-bbd1-8806bfe20177 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.000639] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-746d1d57-868c-4078-989d-eda727c5b4a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.010588] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776657b1-c97c-4114-bf27-a9fbf3746004 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.021688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.021913] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.022122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.022323] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.022493] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 
tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.024759] env[61898]: INFO nova.compute.manager [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Terminating instance [ 625.036575] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0 could not be found. [ 625.036780] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.036957] env[61898]: INFO nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 625.037238] env[61898]: DEBUG oslo.service.loopingcall [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.037654] env[61898]: DEBUG nova.compute.manager [-] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 625.037752] env[61898]: DEBUG nova.network.neutron [-] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.051966] env[61898]: DEBUG nova.network.neutron [-] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.251407] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.912s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.252067] env[61898]: ERROR nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Traceback (most recent call last): [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.driver.spawn(context, instance, image_meta, [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] vm_ref = self.build_virtual_machine(instance, [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.252067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] for vif in network_info: [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self._sync_wrapper(fn, *args, **kwargs) [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.wait() [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: 
f2f968db-d4e2-451d-afe6-330196eba6c2] self[:] = self._gt.wait() [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self._exit_event.wait() [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] result = hub.switch() [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.252380] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return self.greenlet.switch() [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] result = function(*args, **kwargs) [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] return func(*args, **kwargs) [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise e [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] nwinfo = self.network_api.allocate_for_instance( [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] created_port_ids = self._update_ports_for_instance( [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] with excutils.save_and_reraise_exception(): [ 625.253067] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] self.force_reraise() [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise self.value [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] updated_port = self._update_port( [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] _ensure_no_port_binding_failure(port) [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] raise exception.PortBindingFailed(port_id=port['id']) [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] nova.exception.PortBindingFailed: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. [ 625.253605] env[61898]: ERROR nova.compute.manager [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] [ 625.254148] env[61898]: DEBUG nova.compute.utils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 625.254148] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.695s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.255325] env[61898]: INFO nova.compute.claims [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.259642] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Build of instance f2f968db-d4e2-451d-afe6-330196eba6c2 was re-scheduled: Binding failed for port b9301970-192e-4924-b3dc-377b73f06130, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 625.260108] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 625.260335] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquiring lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.260479] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Acquired lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.260636] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.520192] env[61898]: DEBUG nova.network.neutron [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.528991] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.528991] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquired lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.528991] env[61898]: DEBUG nova.network.neutron [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.554736] env[61898]: DEBUG nova.network.neutron [-] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.593826] env[61898]: DEBUG nova.network.neutron [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
625.778227] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.847639] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.048786] env[61898]: DEBUG nova.network.neutron [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.057086] env[61898]: INFO nova.compute.manager [-] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Took 1.02 seconds to deallocate network for instance. [ 626.063028] env[61898]: DEBUG nova.compute.claims [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 626.063028] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.096177] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] Releasing lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.096267] env[61898]: DEBUG nova.compute.manager [req-fe52623d-977a-402e-854c-5db0551bfcd7 req-6b60a4b2-cca1-4de8-9f88-23a85ed9ef53 service nova] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Received event network-vif-deleted-4ee645ae-c20a-4320-bbd1-8806bfe20177 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 626.099293] env[61898]: DEBUG nova.network.neutron [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.349666] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Releasing lock "refresh_cache-f2f968db-d4e2-451d-afe6-330196eba6c2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.350029] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 
tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 626.350229] env[61898]: DEBUG nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 626.350666] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 626.366591] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 626.604128] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b54bb80-cd59-4246-b68d-bb42becda21d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.606812] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Releasing lock "refresh_cache-41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.607262] env[61898]: DEBUG nova.compute.manager [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 626.607459] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 626.608880] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ac68ed-61bb-4845-bd03-b768feb14dc5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.617656] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac3f2a0-5e68-47f7-9a24-5a852221490d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.621037] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 626.621250] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1db6cbf2-6bc6-4f50-ab01-b7c15d66b54f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.652079] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7c9e2a-1694-428c-bcbc-22c5d6a624db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.654299] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 626.654299] env[61898]: value = "task-1240406" [ 626.654299] env[61898]: _type = "Task" [ 626.654299] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.660885] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80399f1-630e-414c-b69e-c54205a2e2b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.668028] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.676825] env[61898]: DEBUG nova.compute.provider_tree [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.870764] env[61898]: DEBUG nova.network.neutron [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.163704] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240406, 'name': PowerOffVM_Task, 'duration_secs': 0.194833} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.163962] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 627.164146] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 627.164380] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b113fa71-a768-4c0d-ac3a-d5c2ca7b73ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.180798] env[61898]: DEBUG nova.scheduler.client.report [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 627.187835] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 627.188077] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] 
Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 627.188273] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleting the datastore file [datastore2] 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 627.188502] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f046ad9d-2158-495e-9bc5-ef09416e12b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.194145] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for the task: (returnval){ [ 627.194145] env[61898]: value = "task-1240408" [ 627.194145] env[61898]: _type = "Task" [ 627.194145] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.202165] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.373222] env[61898]: INFO nova.compute.manager [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] [instance: f2f968db-d4e2-451d-afe6-330196eba6c2] Took 1.02 seconds to deallocate network for instance. [ 627.685592] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.686416] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 627.689050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.637s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.689050] env[61898]: DEBUG nova.objects.instance [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 627.703842] env[61898]: DEBUG oslo_vmware.api [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Task: {'id': task-1240408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12711} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.703986] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 627.704187] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 627.704359] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 627.704525] env[61898]: INFO nova.compute.manager [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 627.704754] env[61898]: DEBUG oslo.service.loopingcall [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 627.704935] env[61898]: DEBUG nova.compute.manager [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 627.705043] env[61898]: DEBUG nova.network.neutron [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 627.720585] env[61898]: DEBUG nova.network.neutron [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.193937] env[61898]: DEBUG nova.compute.utils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.198426] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 628.198426] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 628.223931] env[61898]: DEBUG nova.network.neutron [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.248526] env[61898]: DEBUG nova.policy [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57307029ad249cfa2f9f1fb4b65bd98', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1d3355cf485445ca934f36e02fe191f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 628.408306] env[61898]: INFO nova.scheduler.client.report [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Deleted allocations for instance f2f968db-d4e2-451d-afe6-330196eba6c2 [ 628.526635] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Successfully created port: c14be22f-8866-4421-8d39-1c5806e94592 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.698562] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 
tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 628.705137] env[61898]: DEBUG oslo_concurrency.lockutils [None req-29223c78-f4f9-4098-a667-bda4fa545536 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.706784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.468s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.707804] env[61898]: INFO nova.compute.claims [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.726191] env[61898]: INFO nova.compute.manager [-] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Took 1.02 seconds to deallocate network for instance. [ 628.919226] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7795c3b5-83aa-444e-9a00-ba5628e100d9 tempest-ServersTestJSON-1225005036 tempest-ServersTestJSON-1225005036-project-member] Lock "f2f968db-d4e2-451d-afe6-330196eba6c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.932s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.232957] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.283321] env[61898]: DEBUG nova.compute.manager [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Received event network-changed-c14be22f-8866-4421-8d39-1c5806e94592 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 629.283726] env[61898]: DEBUG nova.compute.manager [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Refreshing instance network info cache due to event network-changed-c14be22f-8866-4421-8d39-1c5806e94592. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 629.284280] env[61898]: DEBUG oslo_concurrency.lockutils [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] Acquiring lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.284491] env[61898]: DEBUG oslo_concurrency.lockutils [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] Acquired lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.284684] env[61898]: DEBUG nova.network.neutron [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Refreshing network info cache for port c14be22f-8866-4421-8d39-1c5806e94592 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.366683] env[61898]: ERROR nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. [ 629.366683] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.366683] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.366683] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.366683] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.366683] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.366683] env[61898]: ERROR nova.compute.manager raise self.value [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.366683] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 629.366683] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.366683] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 629.367154] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.367154] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 629.367154] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding 
failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. [ 629.367154] env[61898]: ERROR nova.compute.manager [ 629.367154] env[61898]: Traceback (most recent call last): [ 629.367154] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 629.367154] env[61898]: listener.cb(fileno) [ 629.367154] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.367154] env[61898]: result = function(*args, **kwargs) [ 629.367154] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.367154] env[61898]: return func(*args, **kwargs) [ 629.367154] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 629.367154] env[61898]: raise e [ 629.367154] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.367154] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 629.367154] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.367154] env[61898]: created_port_ids = self._update_ports_for_instance( [ 629.367154] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.367154] env[61898]: with excutils.save_and_reraise_exception(): [ 629.367154] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.367154] env[61898]: self.force_reraise() [ 629.367154] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.367154] env[61898]: raise self.value [ 629.367154] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.367154] env[61898]: updated_port = self._update_port( [ 629.367154] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.367154] env[61898]: _ensure_no_port_binding_failure(port) [ 629.367154] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.367154] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 629.367923] env[61898]: nova.exception.PortBindingFailed: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. [ 629.367923] env[61898]: Removing descriptor: 20 [ 629.421094] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 629.716147] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 629.745444] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:55:11Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c6f50fc4-d59b-45c9-a874-6b80133abadd',id=32,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1150198827',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 629.745777] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 629.745953] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.746169] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 629.746321] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.746467] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 629.746673] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 629.746835] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 629.747015] env[61898]: DEBUG 
nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 629.747190] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 629.747359] env[61898]: DEBUG nova.virt.hardware [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.748440] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09bb51c-1e50-40ee-9b58-5f7d8b14d77a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.759235] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fb4fa2-4d65-4136-b587-d280cd410669 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.775956] env[61898]: ERROR nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. 
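The nova.virt.hardware lines above show the driver settling on a CPU topology for the 1-vCPU flavor: no preferred sockets/cores/threads (0:0:0), maxima of 65536 each, and a single possible topology of 1 socket, 1 core, 1 thread. A minimal sketch of that kind of enumeration, assuming possible topologies are simply factorizations of the vCPU count capped by the maxima (Nova's nova/virt/hardware.py adds preference sorting and NUMA handling on top of this):

from itertools import product

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and that stay within the maxima. Small maxima are used here
    # for demonstration; the effective limits in the log are 65536 each.
    return [(s, c, t)
            for s, c, t in product(range(1, max_sockets + 1),
                                   range(1, max_cores + 1),
                                   range(1, max_threads + 1))
            if s * c * t == vcpus]

# For the 1-vCPU flavor in the log this yields exactly one topology, (1, 1, 1).
print(possible_topologies(1, 8, 8, 2))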
[ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Traceback (most recent call last): [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] yield resources [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.driver.spawn(context, instance, image_meta, [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] vm_ref = self.build_virtual_machine(instance, [ 629.775956] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] for vif in network_info: [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return self._sync_wrapper(fn, *args, **kwargs) [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.wait() [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self[:] = self._gt.wait() [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return self._exit_event.wait() [ 629.776356] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 629.776356] env[61898]: ERROR 
nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] current.throw(*self._exc) [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] result = function(*args, **kwargs) [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return func(*args, **kwargs) [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise e [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] nwinfo = self.network_api.allocate_for_instance( [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] created_port_ids = self._update_ports_for_instance( [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] with excutils.save_and_reraise_exception(): [ 629.776823] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.force_reraise() [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise self.value [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] updated_port = self._update_port( [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] _ensure_no_port_binding_failure(port) [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise exception.PortBindingFailed(port_id=port['id']) [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] nova.exception.PortBindingFailed: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. [ 629.777227] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] [ 629.777227] env[61898]: INFO nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Terminating instance [ 629.916888] env[61898]: DEBUG nova.network.neutron [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.942420] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.004557] env[61898]: DEBUG nova.network.neutron [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.106924] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc5dc71-2202-44e9-97ed-e41031dba125 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.114846] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12f19ec-f119-4257-82b2-db9aca714316 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.145569] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a62fc3a-5eff-400c-828e-705134bdbef2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.155800] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49411a62-4f21-4ba4-899c-71984d197fb4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.171436] env[61898]: DEBUG nova.compute.provider_tree [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.285160] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock 
"refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 630.509335] env[61898]: DEBUG oslo_concurrency.lockutils [req-aee804f0-3ffb-4811-81d3-39f333e79eda req-e4c1d494-8207-42ab-8368-dc961558d984 service nova] Releasing lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.509335] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquired lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.509335] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.674295] env[61898]: DEBUG nova.scheduler.client.report [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 631.028114] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.078489] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.179293] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.180029] env[61898]: DEBUG nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 631.182792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.690s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.338859] env[61898]: DEBUG nova.compute.manager [req-1eec66fb-5294-4b79-9600-926f34731fb1 req-d860193d-ea7d-47ff-8a82-3ad7ba8ff131 service nova] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Received event network-vif-deleted-c14be22f-8866-4421-8d39-1c5806e94592 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 631.581354] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Releasing lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.581786] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 631.581983] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 631.582386] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df43040d-21d9-481b-9259-63a7f8167914 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.591741] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129b39e0-e43a-4f97-b272-edb43d0f530d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.613262] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f7b6f74-24c1-4db1-9f70-350f307a07b4 could not be found. [ 631.613489] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.613676] env[61898]: INFO nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Took 0.03 seconds to destroy the instance on the hypervisor. 
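Each failed build in this stretch ends the same way: Neutron accepts the port but reports that it could not bind it, _ensure_no_port_binding_failure raises nova.exception.PortBindingFailed, and the compute manager rolls the instance back (destroy on the hypervisor, deallocate network, abort the resource claim). A minimal sketch of the shape of that check, assuming the port dict's binding:vif_type field and a 'binding_failed' sentinel are what is inspected (the real constant lives in nova.network.model):

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

def ensure_no_port_binding_failure(port):
    # Neutron records the chosen VIF type on the port; 'binding_failed'
    # means no mechanism driver could bind it on the target host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': 'c14be22f-8866-4421-8d39-1c5806e94592',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)

Because the exception is raised inside the _allocate_network_async greenthread, it surfaces twice in the log: once from the greenthread itself and again when spawn() iterates the network_info and _sync_wrapper re-raises it.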
[ 631.613908] env[61898]: DEBUG oslo.service.loopingcall [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.614128] env[61898]: DEBUG nova.compute.manager [-] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 631.614221] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 631.635704] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 631.689335] env[61898]: DEBUG nova.compute.utils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.691987] env[61898]: DEBUG nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 632.130247] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc06810-fe44-4a06-ba07-8bac5f16cac4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.137695] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f5a0b6-d9c6-4e01-bca5-cd57ea086f43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.142263] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.170209] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a9035d-f87d-46fa-9395-5b34d31d4ad8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.178325] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52e57af-10ed-401f-909f-18669db73105 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.192109] env[61898]: DEBUG nova.compute.provider_tree [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.193283] env[61898]: DEBUG nova.compute.manager 
[None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 632.645185] env[61898]: INFO nova.compute.manager [-] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Took 1.03 seconds to deallocate network for instance. [ 632.649921] env[61898]: DEBUG nova.compute.claims [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 632.650257] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.697474] env[61898]: DEBUG nova.scheduler.client.report [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 633.205514] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.023s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.206294] env[61898]: ERROR nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. 
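The recurring Lock "compute_resources" acquired/released lines come from the resource tracker serializing instance claims, claim aborts and usage updates on one per-process lock; the long waits above (roughly 18 to 21 seconds) are callers queuing behind each other while builds fail and get rolled back. A minimal sketch of that locking pattern using oslo.concurrency, with a hypothetical tracker class standing in for nova's ResourceTracker:

from oslo_concurrency import lockutils

class MiniResourceTracker:
    # Hypothetical stand-in for nova.compute.resource_tracker.ResourceTracker;
    # only the lock name matches the log.
    def __init__(self):
        self.usage = {'vcpus': 0, 'memory_mb': 0}

    @lockutils.synchronized('compute_resources')
    def instance_claim(self, flavor):
        # Claims, aborts and usage updates all take the same lock, which is
        # why later callers log long "waited N s" times under contention.
        self.usage['vcpus'] += flavor['vcpus']
        self.usage['memory_mb'] += flavor['memory_mb']

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(self, flavor):
        self.usage['vcpus'] -= flavor['vcpus']
        self.usage['memory_mb'] -= flavor['memory_mb']

tracker = MiniResourceTracker()
tracker.instance_claim({'vcpus': 1, 'memory_mb': 192})
tracker.abort_instance_claim({'vcpus': 1, 'memory_mb': 192})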
[ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Traceback (most recent call last): [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.driver.spawn(context, instance, image_meta, [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] vm_ref = self.build_virtual_machine(instance, [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.206294] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] for vif in network_info: [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self._sync_wrapper(fn, *args, **kwargs) [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.wait() [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self[:] = self._gt.wait() [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self._exit_event.wait() [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] result = hub.switch() [ 633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
633.206948] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return self.greenlet.switch() [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] result = function(*args, **kwargs) [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] return func(*args, **kwargs) [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise e [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] nwinfo = self.network_api.allocate_for_instance( [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] created_port_ids = self._update_ports_for_instance( [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] with excutils.save_and_reraise_exception(): [ 633.207320] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] self.force_reraise() [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise self.value [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] updated_port = self._update_port( [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] _ensure_no_port_binding_failure(port) [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] raise exception.PortBindingFailed(port_id=port['id']) [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] nova.exception.PortBindingFailed: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. [ 633.207645] env[61898]: ERROR nova.compute.manager [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] [ 633.207898] env[61898]: DEBUG nova.compute.utils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.209905] env[61898]: DEBUG nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 633.212625] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Build of instance 69ad75e8-dcfc-499a-8f18-bf38575968be was re-scheduled: Binding failed for port f72ef4a5-6b3e-45f2-8bdc-4970df9eafc3, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 633.213393] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 633.213476] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.213592] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.213822] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 633.214928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.170s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.238494] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.238781] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.238961] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 
tempest-ServerShowV254Test-1601862822-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.239211] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.239406] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.239567] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.239822] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.240061] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.240273] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.240455] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.240695] env[61898]: DEBUG nova.virt.hardware [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.241915] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39006ec-bda5-4bab-88e4-b2e477c24e1c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.251505] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69742bd3-3800-4db7-a033-9bb537ad95ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.267495] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 
tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.273379] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Creating folder: Project (dcabd00bf0af4fbfa1ff0c580e9f446e). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.273885] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-977b1af1-ec21-4606-a038-32e7be381863 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.284551] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Created folder: Project (dcabd00bf0af4fbfa1ff0c580e9f446e) in parent group-v267550. [ 633.284730] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Creating folder: Instances. Parent ref: group-v267568. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.284937] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a0cb568-2f92-4a31-91ce-6de867fa601d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.294666] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Created folder: Instances in parent group-v267568. [ 633.294879] env[61898]: DEBUG oslo.service.loopingcall [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 633.295063] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.295247] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3adccac5-13f7-4039-b1c2-ba4d9d9cc2ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.310391] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.310391] env[61898]: value = "task-1240411" [ 633.310391] env[61898]: _type = "Task" [ 633.310391] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.317629] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240411, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.738787] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.813461] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.823219] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240411, 'name': CreateVM_Task, 'duration_secs': 0.250637} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.825873] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 633.826476] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.826648] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.826942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 633.827438] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-629129c4-fd03-4c68-8404-147048897749 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.831995] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 633.831995] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5292059f-f92e-4946-6b46-02dd1f498fe7" [ 633.831995] env[61898]: _type = "Task" [ 633.831995] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.840260] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5292059f-f92e-4946-6b46-02dd1f498fe7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.155688] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835530cf-e224-47fd-b0bd-de3fc7a78413 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.163292] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f05ded-fcb3-4342-bfc4-28a007e6db5e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.193195] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea9be28-d3e0-41a1-bbac-3e11b2745406 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.200794] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33278818-ac73-4778-bd8f-87e81aa91c21 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.215551] env[61898]: DEBUG nova.compute.provider_tree [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.318389] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-69ad75e8-dcfc-499a-8f18-bf38575968be" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.318634] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 634.318816] env[61898]: DEBUG nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 634.318981] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.333938] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.344759] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5292059f-f92e-4946-6b46-02dd1f498fe7, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.345555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.345781] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.346008] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 634.346162] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.346337] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.346808] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-776d2503-d713-4fd2-9c36-c81e375cd21d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.354313] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 634.354484] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 634.355151] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d90c4b8-79fa-49ff-9cad-3337b20adad6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.360129] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 634.360129] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522bfbf1-3e13-0c6a-7dcb-39fb1cb5217e" [ 634.360129] env[61898]: _type = "Task" [ 634.360129] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.367640] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522bfbf1-3e13-0c6a-7dcb-39fb1cb5217e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.718799] env[61898]: DEBUG nova.scheduler.client.report [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 634.839939] env[61898]: DEBUG nova.network.neutron [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.870833] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522bfbf1-3e13-0c6a-7dcb-39fb1cb5217e, 'name': SearchDatastore_Task, 'duration_secs': 0.007793} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.871611] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b06885c-3720-452f-a4bc-2b6c82df5505 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.876845] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 634.876845] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5251fecc-b7ac-e998-8def-293741772fa7" [ 634.876845] env[61898]: _type = "Task" [ 634.876845] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.885235] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5251fecc-b7ac-e998-8def-293741772fa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.224949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.225658] env[61898]: ERROR nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] Traceback (most recent call last): [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.driver.spawn(context, instance, image_meta, [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] vm_ref = self.build_virtual_machine(instance, [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.225658] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] for vif in network_info: [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self._sync_wrapper(fn, *args, **kwargs) [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.wait() [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.225994] 
env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self[:] = self._gt.wait() [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self._exit_event.wait() [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] result = hub.switch() [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.225994] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return self.greenlet.switch() [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] result = function(*args, **kwargs) [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] return func(*args, **kwargs) [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise e [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] nwinfo = self.network_api.allocate_for_instance( [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] created_port_ids = self._update_ports_for_instance( [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] with excutils.save_and_reraise_exception(): [ 635.226345] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] self.force_reraise() [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise self.value [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] updated_port = self._update_port( [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] _ensure_no_port_binding_failure(port) [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] raise exception.PortBindingFailed(port_id=port['id']) [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] nova.exception.PortBindingFailed: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. [ 635.226703] env[61898]: ERROR nova.compute.manager [instance: 759d1958-0518-4654-8686-38be0920c29f] [ 635.227016] env[61898]: DEBUG nova.compute.utils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.227710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.153s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.229284] env[61898]: INFO nova.compute.claims [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.232254] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Build of instance 759d1958-0518-4654-8686-38be0920c29f was re-scheduled: Binding failed for port 47837c36-a959-4078-8926-ab473335dca5, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 635.232999] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 635.232999] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.232999] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.233190] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 635.343792] env[61898]: INFO nova.compute.manager [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: 69ad75e8-dcfc-499a-8f18-bf38575968be] Took 1.02 seconds to deallocate network for instance. [ 635.389155] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5251fecc-b7ac-e998-8def-293741772fa7, 'name': SearchDatastore_Task, 'duration_secs': 0.009108} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.389413] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.389663] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.389904] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4e4526e-616a-4757-94c0-7bddb02e5f79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.396777] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 635.396777] env[61898]: value = "task-1240412" [ 635.396777] env[61898]: _type = "Task" [ 635.396777] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.404140] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.846303] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.907250] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439824} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.907523] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.907729] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.907962] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b56d45a9-6e52-4088-a962-4ecb4833b618 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.914167] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 635.914167] env[61898]: value = "task-1240413" [ 635.914167] env[61898]: _type = "Task" [ 635.914167] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.922550] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240413, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.927526] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.370402] env[61898]: INFO nova.scheduler.client.report [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Deleted allocations for instance 69ad75e8-dcfc-499a-8f18-bf38575968be [ 636.423218] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094318} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.425587] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.426787] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9120c9-a4b3-4268-9fc9-6059dc7d3f79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.429318] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Releasing lock "refresh_cache-759d1958-0518-4654-8686-38be0920c29f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.429526] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 636.429701] env[61898]: DEBUG nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 636.429863] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.449679] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.452799] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 636.454067] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99abfc8f-dd0d-4a54-aef2-762d6ca7ed31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.474091] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 636.474091] env[61898]: value = "task-1240414" [ 636.474091] env[61898]: _type = "Task" [ 636.474091] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.484631] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240414, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.696967] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc2b783-8b35-4472-8fb8-e6c9c5324d58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.705498] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df785e5a-a614-4d5a-ad7d-368888092448 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.738610] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785c5d4c-7985-4c3f-9a3b-d3ea1131ec7e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.745518] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f5597c-467d-4c09-bac1-71da8aae1d31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.758673] env[61898]: DEBUG nova.compute.provider_tree [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.881568] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e597b187-71e4-4ccd-8798-429903aac32f tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "69ad75e8-dcfc-499a-8f18-bf38575968be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.589s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.969503] env[61898]: DEBUG nova.network.neutron [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.983907] env[61898]: 
DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240414, 'name': ReconfigVM_Task, 'duration_secs': 0.289347} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.984766] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.985405] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dafe4089-f8ba-4c74-bca6-0032a44a82cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.991954] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 636.991954] env[61898]: value = "task-1240415" [ 636.991954] env[61898]: _type = "Task" [ 636.991954] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.000100] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240415, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.261882] env[61898]: DEBUG nova.scheduler.client.report [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 637.384543] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 637.474081] env[61898]: INFO nova.compute.manager [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 759d1958-0518-4654-8686-38be0920c29f] Took 1.04 seconds to deallocate network for instance. 
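[editor's note] The recurring "Waiting for the task: (returnval){ value = "task-..." } to complete" and "Task: {...} progress is N% ... completed successfully" records in this log (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task) come from oslo.vmware's task polling: the driver issues a vSphere call through the API session, receives a Task managed-object reference back, and blocks on wait_for_task(), which polls the task at the session's poll interval and logs its progress. The sketch below illustrates that call pattern only; it is not Nova's code. The `session` argument is assumed to be an existing oslo_vmware.api.VMwareAPISession, and `rename_vm` plus its arguments are hypothetical placeholders.

# Minimal sketch of the invoke/wait pattern behind the "Invoking VirtualMachine.Rename_Task"
# and "Task: {...} progress is N%" records above. Assumes `session` is an already-created
# oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference.
def rename_vm(session, vm_ref, new_name):
    """Hypothetical wrapper: start a Rename_Task and block until it finishes."""
    # invoke_api() issues the SOAP request (logged as "Invoking VirtualMachine.Rename_Task")
    # and returns a Task managed-object reference.
    task_ref = session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName=new_name)
    # wait_for_task() polls the task, emitting the "progress is N%" debug records,
    # returns the task result on success, and raises if the task ends in error.
    return session.wait_for_task(task_ref)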
[ 637.502259] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240415, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.767014] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.767660] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 637.770844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.331s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.771065] env[61898]: DEBUG nova.objects.instance [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 637.902444] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.002409] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240415, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.276972] env[61898]: DEBUG nova.compute.utils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.282321] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 638.282498] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.324107] env[61898]: DEBUG nova.policy [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02f8a8e77d6e44bc844b916e2dcb913c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e04d71b6dab84c6196cd869f22e956eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 638.504797] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240415, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.507904] env[61898]: INFO nova.scheduler.client.report [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Deleted allocations for instance 759d1958-0518-4654-8686-38be0920c29f [ 638.602142] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Successfully created port: 3d709c30-22ff-466c-9489-a58fc2d8f251 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 638.785687] env[61898]: DEBUG oslo_concurrency.lockutils [None req-da03a4bd-9e55-428e-b293-cbcce9aa504a tempest-ServersAdmin275Test-451488668 tempest-ServersAdmin275Test-451488668-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.785687] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 638.788811] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.496s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.790386] env[61898]: INFO nova.compute.claims [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.005215] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240415, 'name': Rename_Task, 'duration_secs': 1.845007} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.005559] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 639.005894] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93bf9659-00b1-4dd2-bfc5-5cdb1f235f84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.012318] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 639.012318] env[61898]: value = "task-1240416" [ 639.012318] env[61898]: _type = "Task" [ 639.012318] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.015883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-92a74e06-6482-4a86-832d-69783058db0f tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "759d1958-0518-4654-8686-38be0920c29f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.129s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.023114] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240416, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.395880] env[61898]: DEBUG nova.compute.manager [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Received event network-changed-3d709c30-22ff-466c-9489-a58fc2d8f251 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 639.396107] env[61898]: DEBUG nova.compute.manager [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Refreshing instance network info cache due to event network-changed-3d709c30-22ff-466c-9489-a58fc2d8f251. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 639.396330] env[61898]: DEBUG oslo_concurrency.lockutils [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] Acquiring lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.396469] env[61898]: DEBUG oslo_concurrency.lockutils [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] Acquired lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.396629] env[61898]: DEBUG nova.network.neutron [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Refreshing network info cache for port 3d709c30-22ff-466c-9489-a58fc2d8f251 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.462421] env[61898]: ERROR nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. 
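[editor's note] The "Instance failed network setup ... nova.exception.PortBindingFailed" error above, like the earlier failure for port 47837c36 on instance 759d1958, is raised by the sanity check visible in the traceback that follows (_ensure_no_port_binding_failure in nova/network/neutron.py): after creating or updating a port, Nova inspects the port's binding:vif_type, and if Neutron reported a failed binding it raises PortBindingFailed so the build is aborted and the instance is rescheduled. Below is a simplified restatement of that check, assuming a plain dict for the Neutron port body; the exception class here is a stand-in for nova.exception.PortBindingFailed, not the real one.

# Simplified sketch of the check that produced the PortBindingFailed errors in this log.
# `port` is assumed to be the dict returned by the Neutron API for the port.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for more '
            'information.' % port_id)


def ensure_no_port_binding_failure(port):
    """Raise if Neutron could not bind the port to any host."""
    binding_vif_type = port.get('binding:vif_type')
    if binding_vif_type == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])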
[ 639.462421] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 639.462421] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 639.462421] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 639.462421] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.462421] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.462421] env[61898]: ERROR nova.compute.manager raise self.value [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 639.462421] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 639.462421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.462421] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 639.462866] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.462866] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 639.462866] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. 
[ 639.462866] env[61898]: ERROR nova.compute.manager [ 639.462866] env[61898]: Traceback (most recent call last): [ 639.462866] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 639.462866] env[61898]: listener.cb(fileno) [ 639.462866] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 639.462866] env[61898]: result = function(*args, **kwargs) [ 639.462866] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 639.462866] env[61898]: return func(*args, **kwargs) [ 639.462866] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 639.462866] env[61898]: raise e [ 639.462866] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 639.462866] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 639.462866] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 639.462866] env[61898]: created_port_ids = self._update_ports_for_instance( [ 639.462866] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 639.462866] env[61898]: with excutils.save_and_reraise_exception(): [ 639.462866] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.462866] env[61898]: self.force_reraise() [ 639.462866] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.462866] env[61898]: raise self.value [ 639.462866] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 639.462866] env[61898]: updated_port = self._update_port( [ 639.462866] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.462866] env[61898]: _ensure_no_port_binding_failure(port) [ 639.462866] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.462866] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 639.463660] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. [ 639.463660] env[61898]: Removing descriptor: 20 [ 639.518246] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 639.524305] env[61898]: DEBUG oslo_vmware.api [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240416, 'name': PowerOnVM_Task, 'duration_secs': 0.437094} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.524657] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 639.524857] env[61898]: INFO nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Took 6.31 seconds to spawn the instance on the hypervisor. [ 639.525051] env[61898]: DEBUG nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 639.525802] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4eb5da1-ecc0-490c-81b8-1c90d18e2dff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.796451] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 639.816622] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.817324] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.817324] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.817504] env[61898]: DEBUG nova.virt.hardware [None 
req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.817696] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.818350] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.818431] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.818722] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.819553] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.819553] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.819553] env[61898]: DEBUG nova.virt.hardware [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.820501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97031284-66bf-49f6-b9a4-9437818be66c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.835955] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2919475-b9ea-42fc-95e1-e7baec0d66ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.855345] env[61898]: ERROR nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 
082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Traceback (most recent call last): [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] yield resources [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.driver.spawn(context, instance, image_meta, [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] vm_ref = self.build_virtual_machine(instance, [ 639.855345] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] vif_infos = vmwarevif.get_vif_info(self._session, [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] for vif in network_info: [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return self._sync_wrapper(fn, *args, **kwargs) [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.wait() [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self[:] = self._gt.wait() [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return self._exit_event.wait() [ 639.855730] 
env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 639.855730] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] current.throw(*self._exc) [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] result = function(*args, **kwargs) [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return func(*args, **kwargs) [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise e [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] nwinfo = self.network_api.allocate_for_instance( [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] created_port_ids = self._update_ports_for_instance( [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] with excutils.save_and_reraise_exception(): [ 639.856060] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.force_reraise() [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise self.value [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] updated_port = self._update_port( [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] 
_ensure_no_port_binding_failure(port) [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise exception.PortBindingFailed(port_id=port['id']) [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. [ 639.856364] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] [ 639.856364] env[61898]: INFO nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Terminating instance [ 639.915570] env[61898]: DEBUG nova.network.neutron [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 640.000800] env[61898]: DEBUG nova.network.neutron [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.044148] env[61898]: INFO nova.compute.manager [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Took 29.83 seconds to build instance. 
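Editor's note: the oslo_concurrency.lockutils DEBUG lines in this section ("Acquiring lock ...", "Lock ... acquired ... waited ...", 'Lock ... "released" ... held ...') all come from the same named-lock pattern. A rough sketch of that usage with the public lockutils API follows; the lock name mirrors the log, and the resource-tracker body is a placeholder, not Nova code.

# Rough sketch of the named-lock pattern behind the lockutils DEBUG lines:
# acquire, report time waited, run the critical section, report time held.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim(resource_tracker, instance):
    # While this runs, other claims against "compute_resources" wait,
    # which is what the 'waited N.NNNs' / 'held N.NNNs' figures measure.
    return resource_tracker.claim(instance)  # placeholder body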
[ 640.048733] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.218688] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0e65a9-3ddf-418c-9a45-0b17d9a90a9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.227313] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6cdde0f-4791-4306-b372-0aca7e51b633 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.258791] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7059ab-6118-4d14-9b18-a2f0d3a9b217 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.266191] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd4d4d7-b8f9-4256-9538-c18813c5f978 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.280414] env[61898]: DEBUG nova.compute.provider_tree [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.365138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.503264] env[61898]: DEBUG oslo_concurrency.lockutils [req-8292a484-79a7-4ae6-a1d4-32a8b01f620c req-216d21f0-7690-4313-89d4-a71e491fef9e service nova] Releasing lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.503769] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquired lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.503990] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 640.545415] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b59d26ec-39fe-44b3-ba12-4fdb8d32e7e8 tempest-ServerShowV254Test-1601862822 
tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.244s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.561891] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "34338563-05d4-477b-8480-6ef4cbf28e72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.562138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.623099] env[61898]: INFO nova.compute.manager [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Rebuilding instance [ 640.667539] env[61898]: DEBUG nova.compute.manager [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 640.668425] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241a5c51-7641-4b1d-9cb0-6677a84eaf43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.783842] env[61898]: DEBUG nova.scheduler.client.report [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 641.023179] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.047709] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 641.108530] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.291016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.291016] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 641.291553] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.410s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.505755] env[61898]: DEBUG nova.compute.manager [req-da7ffe30-5068-425f-8ff2-c378fb34f33f req-5b48642c-c5c1-4e45-a4ee-ce9d1be2fec0 service nova] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Received event network-vif-deleted-3d709c30-22ff-466c-9489-a58fc2d8f251 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 641.570535] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.612198] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Releasing lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.612198] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Start destroying the 
instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 641.612314] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 641.612539] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1e50337-f707-461e-9960-07238e99edfe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.625456] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42121c2b-56e3-476c-ba41-7c1c22d8ffe0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.649184] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 082fe687-5038-4c31-9b27-f8a5c548cdc1 could not be found. [ 641.649435] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 641.649617] env[61898]: INFO nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 641.649854] env[61898]: DEBUG oslo.service.loopingcall [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 641.650240] env[61898]: DEBUG nova.compute.manager [-] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 641.650240] env[61898]: DEBUG nova.network.neutron [-] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 641.665150] env[61898]: DEBUG nova.network.neutron [-] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 641.689573] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.689851] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae970567-cad7-4453-ac41-539a577ee1ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.698528] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 641.698528] env[61898]: value = "task-1240417" [ 641.698528] env[61898]: _type = "Task" [ 641.698528] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.708041] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240417, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.797053] env[61898]: DEBUG nova.compute.utils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 641.801771] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 641.801771] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 641.846458] env[61898]: DEBUG nova.policy [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3848d73334ff490696e92ac9da3a4a25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '294362a0c6b04039b589ae5eb0d341ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 642.102114] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Successfully created port: 74cda50e-d23e-40f6-8e06-cbff437709e2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.135249] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602ab33f-9b95-4ddf-8257-7a97134fcba4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.142994] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056620f7-3ce0-4127-9556-044c83912f49 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.171520] env[61898]: DEBUG nova.network.neutron [-] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.173396] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb1c689-1806-4495-9a0a-9f9756f67eb5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.180903] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165ffb01-d49f-4d65-a408-14294d899490 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.194727] env[61898]: DEBUG nova.compute.provider_tree [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.208816] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240417, 'name': PowerOffVM_Task, 'duration_secs': 0.179741} 
completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.209077] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 642.209592] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 642.210036] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3accfc85-9ca1-4f4b-b60d-f4acb208549a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.216646] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 642.217090] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7773e5bd-64e5-41d1-99c0-1fcafeb0989b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.244057] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 642.244057] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 642.244211] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Deleting the datastore file [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 642.244445] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d196656c-9a6f-48f7-a41c-657a07212462 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.250487] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 642.250487] env[61898]: value = "task-1240419" [ 642.250487] env[61898]: _type = "Task" [ 642.250487] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.258597] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240419, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.303030] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 642.366159] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Successfully created port: ec060778-4d90-4628-bbbe-52f94fb9e1f9 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.620917] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Successfully created port: 3683272c-98b1-4638-b55d-1dd16dc3ac12 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.676585] env[61898]: INFO nova.compute.manager [-] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Took 1.03 seconds to deallocate network for instance. [ 642.678943] env[61898]: DEBUG nova.compute.claims [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 642.679142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.697811] env[61898]: DEBUG nova.scheduler.client.report [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 642.767873] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240419, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.102877} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.767873] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 642.768017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 642.768181] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 643.207272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.207963] env[61898]: ERROR nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. 
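Editor's note: the "Waiting for the task ...", "Task: {...} progress is 0%", and "completed successfully" lines above (task-1240417 PowerOffVM_Task, task-1240419 DeleteDatastoreFile_Task) follow the oslo.vmware task-polling pattern. The sketch below assumes only the public session.invoke_api / session.wait_for_task API; everything else is a placeholder.

# Sketch of the oslo.vmware task-polling pattern behind the _poll_task
# DEBUG lines above. Not Nova's vm_util code, just the general shape.
from oslo_vmware import api


def power_off_and_wait(session: api.VMwareAPISession, vm_ref) -> None:
    # invoke_api returns a task reference; wait_for_task polls it until
    # it reports success or error (the 'progress is 0%' lines).
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)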
[ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Traceback (most recent call last): [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.driver.spawn(context, instance, image_meta, [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] vm_ref = self.build_virtual_machine(instance, [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.207963] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] for vif in network_info: [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self._sync_wrapper(fn, *args, **kwargs) [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.wait() [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self[:] = self._gt.wait() [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self._exit_event.wait() [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] result = hub.switch() [ 643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
643.208323] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return self.greenlet.switch() [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] result = function(*args, **kwargs) [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] return func(*args, **kwargs) [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise e [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] nwinfo = self.network_api.allocate_for_instance( [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] created_port_ids = self._update_ports_for_instance( [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] with excutils.save_and_reraise_exception(): [ 643.208739] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] self.force_reraise() [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise self.value [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] updated_port = self._update_port( [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] _ensure_no_port_binding_failure(port) [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] raise exception.PortBindingFailed(port_id=port['id']) [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] nova.exception.PortBindingFailed: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. [ 643.209125] env[61898]: ERROR nova.compute.manager [instance: cfb2f64b-7026-444d-8f86-500445343ac1] [ 643.209530] env[61898]: DEBUG nova.compute.utils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 643.209972] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.294s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.210261] env[61898]: DEBUG nova.objects.instance [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lazy-loading 'resources' on Instance uuid 7ef91986-fb46-478b-85a5-05d597790ad9 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 643.211704] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Build of instance cfb2f64b-7026-444d-8f86-500445343ac1 was re-scheduled: Binding failed for port 950f9b51-3d4f-4df9-98a0-aa4e423ec4ac, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 643.212126] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 643.212346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquiring lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.214657] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Acquired lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.214657] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 643.317049] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 643.345716] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.345716] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.345841] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.346030] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.346185] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.346334] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.346536] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.346691] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.346852] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 
tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.347015] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.347189] env[61898]: DEBUG nova.virt.hardware [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.348066] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d344a67-5a16-492b-ab24-ecc3e8d53621 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.356430] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac32e8c9-f559-4d0a-970b-cdf0aa80f2f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.447386] env[61898]: ERROR nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. 
[ 643.447386] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.447386] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.447386] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.447386] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.447386] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.447386] env[61898]: ERROR nova.compute.manager raise self.value [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.447386] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 643.447386] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.447386] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 643.447951] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.447951] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 643.447951] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. 
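The traceback above shows the pattern used in _update_ports_for_instance: the port update runs inside oslo.utils' save_and_reraise_exception() context manager, so the Neutron failure can be recorded and cleaned up and is then re-raised unchanged as PortBindingFailed. The following is a minimal, self-contained sketch of that pattern only; the port dict, the stand-in exception class and the _update_port() helper are illustrative, and only the oslo_utils context manager is the real API.

from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustrative only)."""


def _update_port(port):
    # Mirrors the idea of _ensure_no_port_binding_failure(): a port whose
    # binding ended up in the failed state makes the whole update fail.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed("Binding failed for port %s" % port["id"])
    return port["id"]


def update_ports_for_instance(ports):
    created_port_ids = []
    for port in ports:
        with excutils.save_and_reraise_exception():
            # Any exception raised in this block is captured, can be logged,
            # and is re-raised unchanged when the block exits.
            created_port_ids.append(_update_port(port))
    return created_port_ids


try:
    update_ports_for_instance(
        [{"id": "74cda50e-d23e-40f6-8e06-cbff437709e2",
          "binding:vif_type": "binding_failed"}])
except PortBindingFailed as exc:
    print("caught:", exc)
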
[ 643.447951] env[61898]: ERROR nova.compute.manager [ 643.447951] env[61898]: Traceback (most recent call last): [ 643.447951] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 643.447951] env[61898]: listener.cb(fileno) [ 643.447951] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.447951] env[61898]: result = function(*args, **kwargs) [ 643.447951] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.447951] env[61898]: return func(*args, **kwargs) [ 643.447951] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.447951] env[61898]: raise e [ 643.447951] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.447951] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 643.447951] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.447951] env[61898]: created_port_ids = self._update_ports_for_instance( [ 643.447951] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.447951] env[61898]: with excutils.save_and_reraise_exception(): [ 643.447951] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.447951] env[61898]: self.force_reraise() [ 643.447951] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.447951] env[61898]: raise self.value [ 643.447951] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.447951] env[61898]: updated_port = self._update_port( [ 643.447951] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.447951] env[61898]: _ensure_no_port_binding_failure(port) [ 643.447951] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.447951] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 643.448824] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. [ 643.448824] env[61898]: Removing descriptor: 20 [ 643.448824] env[61898]: ERROR nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. 
[ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Traceback (most recent call last): [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] yield resources [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.driver.spawn(context, instance, image_meta, [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.448824] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] vm_ref = self.build_virtual_machine(instance, [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] for vif in network_info: [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self._sync_wrapper(fn, *args, **kwargs) [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.wait() [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self[:] = self._gt.wait() [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self._exit_event.wait() [ 643.449192] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.449568] env[61898]: ERROR 
nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] result = hub.switch() [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self.greenlet.switch() [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] result = function(*args, **kwargs) [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return func(*args, **kwargs) [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise e [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] nwinfo = self.network_api.allocate_for_instance( [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 643.449568] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] created_port_ids = self._update_ports_for_instance( [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] with excutils.save_and_reraise_exception(): [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.force_reraise() [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise self.value [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] updated_port = self._update_port( [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.449939] 
env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] _ensure_no_port_binding_failure(port) [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.449939] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise exception.PortBindingFailed(port_id=port['id']) [ 643.450310] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. [ 643.450310] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] [ 643.450310] env[61898]: INFO nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Terminating instance [ 643.533306] env[61898]: DEBUG nova.compute.manager [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Received event network-changed-74cda50e-d23e-40f6-8e06-cbff437709e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 643.533413] env[61898]: DEBUG nova.compute.manager [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Refreshing instance network info cache due to event network-changed-74cda50e-d23e-40f6-8e06-cbff437709e2. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 643.533625] env[61898]: DEBUG oslo_concurrency.lockutils [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] Acquiring lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.534176] env[61898]: DEBUG oslo_concurrency.lockutils [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] Acquired lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.534496] env[61898]: DEBUG nova.network.neutron [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Refreshing network info cache for port 74cda50e-d23e-40f6-8e06-cbff437709e2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.731444] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance cache missing network info. 
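The Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>" lines above come from oslo.concurrency. Below is a minimal sketch of that locking pattern, with the actual Neutron cache refresh replaced by a placeholder body.

from oslo_concurrency import lockutils

INSTANCE_UUID = "c6e77597-5a5d-4b86-8588-7056828025cf"


def refresh_instance_network_cache(uuid):
    # lockutils.lock() is a context manager; entering and leaving it is what
    # emits the DEBUG acquire/release lines seen in the log. Passing
    # external=True would switch to a file-based lock shared across processes.
    with lockutils.lock("refresh_cache-%s" % uuid):
        return {"network_info": []}  # placeholder for the Neutron lookup


# The same guard expressed as a decorator:
@lockutils.synchronized("refresh_cache-%s" % INSTANCE_UUID)
def refresh_this_instance():
    return {"network_info": []}


print(refresh_instance_network_cache(INSTANCE_UUID))
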
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 643.801991] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.802258] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.802414] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.802597] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.802739] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.802882] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.803099] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.803364] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.803432] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 
tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.803590] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.803761] env[61898]: DEBUG nova.virt.hardware [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.804618] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29433036-b57f-4dd5-b887-4d7b4beecb64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.810961] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.815616] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a4fe26-31d1-4362-8c7f-74d4a7b64eda {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.834874] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.841140] env[61898]: DEBUG oslo.service.loopingcall [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.844133] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.844378] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51e603ad-042e-4fe9-9152-fea9c11b3592 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.865280] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.865280] env[61898]: value = "task-1240420" [ 643.865280] env[61898]: _type = "Task" [ 643.865280] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.875723] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240420, 'name': CreateVM_Task} progress is 0%. 
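The CreateVM_Task lines above follow oslo.vmware's invoke-then-poll pattern: a SOAP method that returns a Task is invoked through the session, and wait_for_task() polls it until it finishes, which is where the "progress is 0% ... completed successfully" lines come from. A rough sketch of that flow follows; the vCenter credentials, the folder and resource-pool references, and the config spec are all placeholders.

from oslo_vmware import api as vmware_api


def create_vm(folder_ref, respool_ref, config_spec):
    session = vmware_api.VMwareAPISession(
        "vc.example.test",                 # placeholder vCenter host
        "administrator@vsphere.local",     # placeholder credentials
        "password",
        api_retry_count=10,
        task_poll_interval=0.5)
    # invoke_api() issues the SOAP call; the return value is a Task moref.
    task = session.invoke_api(session.vim, "CreateVM_Task", folder_ref,
                              config=config_spec, pool=respool_ref)
    # wait_for_task() polls the task and returns its info on success,
    # raising if the task ends in an error state.
    return session.wait_for_task(task)
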
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.953819] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.077241] env[61898]: DEBUG nova.network.neutron [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.137716] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5b204d-bc5d-49ba-a5cf-54f08538fa0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.145039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61b962c-87d4-42f8-96c2-6503a812e4a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.175236] env[61898]: DEBUG nova.network.neutron [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.176894] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc907ddc-adfd-4ebc-8795-5a2db9a9c949 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.185077] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb298558-6d31-4966-b2ba-90ba3fd61609 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.200137] env[61898]: DEBUG nova.compute.provider_tree [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.316105] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Releasing lock "refresh_cache-cfb2f64b-7026-444d-8f86-500445343ac1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.316417] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 644.316417] env[61898]: DEBUG nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 644.316572] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 644.331226] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 644.375284] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240420, 'name': CreateVM_Task, 'duration_secs': 0.25544} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.375471] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.375945] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.376142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.376481] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 644.376749] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47023890-9887-4bca-9706-bf856d7b2fb9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.381310] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 644.381310] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f118d1-8b5c-5984-b9ea-659629aef126" [ 644.381310] env[61898]: _type = "Task" [ 644.381310] env[61898]: } to 
complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.389075] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f118d1-8b5c-5984-b9ea-659629aef126, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.680630] env[61898]: DEBUG oslo_concurrency.lockutils [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] Releasing lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.680943] env[61898]: DEBUG nova.compute.manager [req-8776600d-3534-41c7-979b-c95331f01aaa req-d94bdfcc-73d8-49a0-819b-8e62e5b8bf4f service nova] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Received event network-vif-deleted-74cda50e-d23e-40f6-8e06-cbff437709e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 644.681328] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.681572] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 644.703307] env[61898]: DEBUG nova.scheduler.client.report [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 644.834126] env[61898]: DEBUG nova.network.neutron [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.891431] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f118d1-8b5c-5984-b9ea-659629aef126, 'name': SearchDatastore_Task, 'duration_secs': 0.008853} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.891726] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.891954] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.892214] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.892358] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.892533] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.892771] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72ff4a27-1187-4992-b6c3-0dcc0d4a9251 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.900482] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.900657] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.901346] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1b3e7a6-a028-4646-9e5a-4fdc2cf94fc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.906118] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 644.906118] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529fc12d-8472-88cf-fe8a-04eee7a4568d" [ 644.906118] env[61898]: _type = "Task" [ 644.906118] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.913732] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529fc12d-8472-88cf-fe8a-04eee7a4568d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.208919] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.211958] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.583s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.215592] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.228106] env[61898]: INFO nova.scheduler.client.report [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Deleted allocations for instance 7ef91986-fb46-478b-85a5-05d597790ad9 [ 645.294736] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.336656] env[61898]: INFO nova.compute.manager [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] [instance: cfb2f64b-7026-444d-8f86-500445343ac1] Took 1.02 seconds to deallocate network for instance. 
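The image-cache handling above works on datastore paths of the form "[datastore2] devstack-image-cache_base/<image id>/<image id>.vmdk". A short sketch of building and parsing such a path, assuming oslo.vmware's DatastorePath helper; the image id is the one from the log.

from oslo_vmware.objects import datastore as ds_obj

IMAGE_ID = "e07a6c11-ab12-4187-81fc-1a28a9d1e65d"

# Build "[datastore2] devstack-image-cache_base/<id>/<id>.vmdk"
cache_vmdk = ds_obj.DatastorePath(
    "datastore2", "devstack-image-cache_base", IMAGE_ID, IMAGE_ID + ".vmdk")
print(str(cache_vmdk))

# Parse the string form back into datastore name and relative path.
parsed = ds_obj.DatastorePath.parse(str(cache_vmdk))
print(parsed.datastore, parsed.rel_path)
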
[ 645.417028] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529fc12d-8472-88cf-fe8a-04eee7a4568d, 'name': SearchDatastore_Task, 'duration_secs': 0.007958} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.417028] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93e53ce7-5b31-471a-a6c2-d5eaa57b47ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.421934] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 645.421934] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525cf659-f7a0-8dc1-5680-91da39a7a5a8" [ 645.421934] env[61898]: _type = "Task" [ 645.421934] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.431142] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525cf659-f7a0-8dc1-5680-91da39a7a5a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.740116] env[61898]: DEBUG oslo_concurrency.lockutils [None req-344b0277-32e7-494d-9a5b-3c6882bf9e09 tempest-ServersAdmin275Test-565549473 tempest-ServersAdmin275Test-565549473-project-member] Lock "7ef91986-fb46-478b-85a5-05d597790ad9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.073s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.797395] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Releasing lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.797880] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 645.798138] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 645.798440] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c777a61b-ee1b-4d03-929d-716423b07573 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.809120] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689771a7-c6a3-4b8f-b199-ec2d4d4056c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.834051] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6e77597-5a5d-4b86-8588-7056828025cf could not be found. [ 645.834293] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 645.834481] env[61898]: INFO nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Took 0.04 seconds to destroy the instance on the hypervisor. [ 645.834722] env[61898]: DEBUG oslo.service.loopingcall [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.837078] env[61898]: DEBUG nova.compute.manager [-] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 645.837177] env[61898]: DEBUG nova.network.neutron [-] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 645.928106] env[61898]: DEBUG nova.network.neutron [-] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.936030] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525cf659-f7a0-8dc1-5680-91da39a7a5a8, 'name': SearchDatastore_Task, 'duration_secs': 0.00918} completed successfully. 
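The "Waiting for function ... _deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery driving a retried call. Below is a minimal sketch of that retry shape using FixedIntervalLoopingCall; the deallocation body and the retry limit are placeholders, not the actual helper.

from oslo_service import loopingcall

attempts = {"n": 0}


def _deallocate_with_retries():
    attempts["n"] += 1
    try:
        pass  # placeholder for the actual Neutron deallocate call
    except Exception:
        if attempts["n"] >= 3:
            raise   # give up after a few tries
        return      # returning lets the loop invoke us again next interval
    # Raising LoopingCallDone stops the loop and hands back a return value.
    raise loopingcall.LoopingCallDone(retvalue=True)


timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
print("deallocated:", timer.start(interval=0.1).wait())
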
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.936030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.936206] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.937073] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d93995c6-2f2b-427a-a813-1edd9128022c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.944711] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 645.944711] env[61898]: value = "task-1240421" [ 645.944711] env[61898]: _type = "Task" [ 645.944711] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.953031] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240421, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.128625] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a0f479-2ecc-4f72-9b71-917fc6de8961 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.141265] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044b53f8-51d6-4b20-88b5-3f308da6e03f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.181912] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76e6f23-cbe3-4812-81b5-2a1ed7ebecaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.196192] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51b6d3f-3c74-49ee-8d95-d615aa7c8064 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.212259] env[61898]: DEBUG nova.compute.provider_tree [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.377995] env[61898]: INFO nova.scheduler.client.report [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Deleted allocations for instance cfb2f64b-7026-444d-8f86-500445343ac1 [ 646.457109] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240421, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453219} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.457563] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.457628] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.457833] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bf66345-a52f-4804-9c65-24a5924ac52f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.466046] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 646.466046] env[61898]: value = "task-1240422" [ 646.466046] env[61898]: _type = "Task" [ 646.466046] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.474622] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240422, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.715481] env[61898]: DEBUG nova.scheduler.client.report [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 646.872919] env[61898]: DEBUG nova.network.neutron [-] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.888072] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e22f6e4-c08a-4e9c-a008-92b5f7ab7f7e tempest-ListServerFiltersTestJSON-231285373 tempest-ListServerFiltersTestJSON-231285373-project-member] Lock "cfb2f64b-7026-444d-8f86-500445343ac1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.590s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.976372] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063166} completed successfully. 
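The inventory payloads above are what the resource tracker reports to Placement per resource class. They are reproduced below, together with a small illustrative helper (not Nova code) that computes the schedulable capacity Placement derives from them as (total - reserved) * allocation_ratio.

INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def effective_capacity(inventory):
    """Schedulable capacity per resource class: (total - reserved) * ratio."""
    return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
            for rc, inv in inventory.items()}


print(effective_capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
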
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.976796] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.977718] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5912d792-1e32-4a61-9a5e-ad4a2b5d7bde {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.997156] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.997440] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b89db07-97a0-4193-b1fd-18444fb79a63 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.017028] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 647.017028] env[61898]: value = "task-1240423" [ 647.017028] env[61898]: _type = "Task" [ 647.017028] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.024886] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.220368] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.009s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.221195] env[61898]: ERROR nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. 
[ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Traceback (most recent call last): [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.driver.spawn(context, instance, image_meta, [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] vm_ref = self.build_virtual_machine(instance, [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.221195] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] for vif in network_info: [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return self._sync_wrapper(fn, *args, **kwargs) [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.wait() [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self[:] = self._gt.wait() [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return self._exit_event.wait() [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] current.throw(*self._exc) [ 647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
647.221615] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] result = function(*args, **kwargs) [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] return func(*args, **kwargs) [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise e [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] nwinfo = self.network_api.allocate_for_instance( [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] created_port_ids = self._update_ports_for_instance( [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] with excutils.save_and_reraise_exception(): [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] self.force_reraise() [ 647.222041] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise self.value [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] updated_port = self._update_port( [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] _ensure_no_port_binding_failure(port) [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] raise exception.PortBindingFailed(port_id=port['id']) [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] nova.exception.PortBindingFailed: Binding failed for 
port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. [ 647.222396] env[61898]: ERROR nova.compute.manager [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] [ 647.222396] env[61898]: DEBUG nova.compute.utils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 647.223520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.525s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.223520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.223520] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 647.223750] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.161s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.227575] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Build of instance f968f3df-c70b-466b-8aaa-879354f12d3b was re-scheduled: Binding failed for port 7371fb07-a597-4fd5-8d37-e0669fc56203, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 647.228098] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 647.228414] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquiring lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.228531] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Acquired lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.228740] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.230367] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74000cef-7f95-4885-b469-03cfefbff4bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.239848] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f4d09c-d3df-41b8-885a-093662e6b7b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.260546] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64465d61-0073-490f-9253-1862a9ee1bd3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.269448] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baef4ed5-b4d7-4899-9738-d5c7df0752a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.300919] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181224MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 647.301094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.376033] env[61898]: INFO nova.compute.manager [-] [instance: 
c6e77597-5a5d-4b86-8588-7056828025cf] Took 1.54 seconds to deallocate network for instance. [ 647.378314] env[61898]: DEBUG nova.compute.claims [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 647.378602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.389995] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 647.526682] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240423, 'name': ReconfigVM_Task, 'duration_secs': 0.263745} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.526860] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a/23a0d825-3132-44d5-8b73-a06a0c0e7b1a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 647.527507] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b09e3925-9690-4cdc-824d-169aacec0c9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.533737] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 647.533737] env[61898]: value = "task-1240424" [ 647.533737] env[61898]: _type = "Task" [ 647.533737] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.545654] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240424, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.758031] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.892802] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.912493] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.046634] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240424, 'name': Rename_Task, 'duration_secs': 0.213376} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.047057] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 648.047346] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc286dea-c200-4a09-825b-4490f3cb756b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.055744] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 648.055744] env[61898]: value = "task-1240425" [ 648.055744] env[61898]: _type = "Task" [ 648.055744] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.072769] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240425, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.103801] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571b8e30-63ac-4eaa-ac23-71e7ed45c2d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.111239] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b850689b-1951-405b-88e5-fa54384949a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.143980] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6be52c-4dd9-41bb-9430-d36b938cba8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.151680] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30978235-8830-493e-b27a-e2376488e4e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.164936] env[61898]: DEBUG nova.compute.provider_tree [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.397031] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Releasing lock "refresh_cache-f968f3df-c70b-466b-8aaa-879354f12d3b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.397347] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 648.397457] env[61898]: DEBUG nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 648.397621] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.559264] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 648.578373] env[61898]: DEBUG oslo_vmware.api [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240425, 'name': PowerOnVM_Task, 'duration_secs': 0.422364} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.578373] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.578373] env[61898]: DEBUG nova.compute.manager [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 648.578373] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f8b60c-a335-4fb4-a948-abd82c895271 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.667866] env[61898]: DEBUG nova.scheduler.client.report [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 649.069013] env[61898]: DEBUG nova.network.neutron [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.099888] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.176844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.178404] env[61898]: ERROR nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 
tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Traceback (most recent call last): [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.driver.spawn(context, instance, image_meta, [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] vm_ref = self.build_virtual_machine(instance, [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.178404] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] for vif in network_info: [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return self._sync_wrapper(fn, *args, **kwargs) [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.wait() [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self[:] = self._gt.wait() [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return self._exit_event.wait() [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 
649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] current.throw(*self._exc) [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.178694] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] result = function(*args, **kwargs) [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] return func(*args, **kwargs) [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise e [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] nwinfo = self.network_api.allocate_for_instance( [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] created_port_ids = self._update_ports_for_instance( [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] with excutils.save_and_reraise_exception(): [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] self.force_reraise() [ 649.179101] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise self.value [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] updated_port = self._update_port( [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] _ensure_no_port_binding_failure(port) [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] raise exception.PortBindingFailed(port_id=port['id']) [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] nova.exception.PortBindingFailed: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. [ 649.179490] env[61898]: ERROR nova.compute.manager [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] [ 649.179490] env[61898]: DEBUG nova.compute.utils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 649.181580] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Build of instance 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0 was re-scheduled: Binding failed for port 4ee645ae-c20a-4320-bbd1-8806bfe20177, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 649.182956] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 649.183970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquiring lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.183970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Acquired lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.183970] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.184662] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.952s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.184885] env[61898]: DEBUG nova.objects.instance [None req-89d84987-f63b-4594-bdb8-c7ce87331262 
tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lazy-loading 'resources' on Instance uuid 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.498842] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.499090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.499272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.499493] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.499573] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.501990] env[61898]: INFO nova.compute.manager [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Terminating instance [ 649.575319] env[61898]: INFO nova.compute.manager [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] [instance: f968f3df-c70b-466b-8aaa-879354f12d3b] Took 1.18 seconds to deallocate network for instance. [ 649.721296] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.886625] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.006372] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "refresh_cache-23a0d825-3132-44d5-8b73-a06a0c0e7b1a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.006458] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquired lock "refresh_cache-23a0d825-3132-44d5-8b73-a06a0c0e7b1a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.006629] env[61898]: DEBUG nova.network.neutron [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.120952] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fb7eb4-f4bd-4d48-91e7-c32ab7098b98 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.133086] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09e8acc-2988-4783-ae96-25df344abd0e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.168487] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b20dcb-e2ac-4fce-9d89-f7e095e1fa81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.176299] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60cc2f4-ab95-4dc6-a5fb-d42da0585bb4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.192186] env[61898]: DEBUG nova.compute.provider_tree [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.394883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Releasing lock "refresh_cache-1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.394883] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 
tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 650.394883] env[61898]: DEBUG nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 650.394883] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 650.412635] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.524806] env[61898]: DEBUG nova.network.neutron [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.622034] env[61898]: DEBUG nova.network.neutron [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.626393] env[61898]: INFO nova.scheduler.client.report [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Deleted allocations for instance f968f3df-c70b-466b-8aaa-879354f12d3b [ 650.696375] env[61898]: DEBUG nova.scheduler.client.report [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 650.914617] env[61898]: DEBUG nova.network.neutron [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 651.138495] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Releasing lock "refresh_cache-23a0d825-3132-44d5-8b73-a06a0c0e7b1a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.138695] env[61898]: DEBUG nova.compute.manager [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 651.138881] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.140614] env[61898]: DEBUG oslo_concurrency.lockutils [None req-201d5c18-f5fd-4407-8b98-bd523a947c2a tempest-ImagesOneServerNegativeTestJSON-1817704843 tempest-ImagesOneServerNegativeTestJSON-1817704843-project-member] Lock "f968f3df-c70b-466b-8aaa-879354f12d3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.295s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.141432] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7eeb7d-fb35-4a7e-b0a2-385b56d0236a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.154709] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.155057] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5ef73b1-0ee9-4fb1-b152-71cfb83b5b8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.164815] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 651.164815] env[61898]: value = "task-1240426" [ 651.164815] env[61898]: _type = "Task" [ 651.164815] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.181487] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.201619] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.205299] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.263s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.207869] env[61898]: INFO nova.compute.claims [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 651.242716] env[61898]: INFO nova.scheduler.client.report [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Deleted allocations for instance 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6 [ 651.420600] env[61898]: INFO nova.compute.manager [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] [instance: 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0] Took 1.02 seconds to deallocate network for instance. [ 651.647944] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 651.678283] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240426, 'name': PowerOffVM_Task, 'duration_secs': 0.131453} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.678630] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.679320] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.679828] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8610e9c3-0340-416d-b3d3-93eb82c85408 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.707040] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.707337] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.707337] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Deleting the datastore file [datastore2] 23a0d825-3132-44d5-8b73-a06a0c0e7b1a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.707549] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1db0acbe-68d9-480d-90b4-dd2cd28817ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.716621] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for the task: (returnval){ [ 651.716621] env[61898]: value = "task-1240428" [ 651.716621] env[61898]: _type = "Task" [ 651.716621] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.724186] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240428, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.756549] env[61898]: DEBUG oslo_concurrency.lockutils [None req-89d84987-f63b-4594-bdb8-c7ce87331262 tempest-ServerShowV247Test-397224089 tempest-ServerShowV247Test-397224089-project-member] Lock "41ac9f9b-5cd3-4302-86ac-8ef7cae603b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.734s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.182813] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.238805] env[61898]: DEBUG oslo_vmware.api [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Task: {'id': task-1240428, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092618} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.238805] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.238805] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.239436] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.239436] env[61898]: INFO nova.compute.manager [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Took 1.10 seconds to destroy the instance on the hypervisor. [ 652.239436] env[61898]: DEBUG oslo.service.loopingcall [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.239618] env[61898]: DEBUG nova.compute.manager [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 652.239673] env[61898]: DEBUG nova.network.neutron [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 652.263792] env[61898]: DEBUG nova.network.neutron [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 652.465241] env[61898]: INFO nova.scheduler.client.report [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Deleted allocations for instance 1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0 [ 652.701560] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e241cce-d1bb-4c4a-9107-42e24cb4e3ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.709292] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec4af7d-05b9-4397-90c3-307259fe23f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.739610] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec2e996-27e7-4ed3-a9aa-ef9774f758e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.747517] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988fc4f9-e70b-45dd-9735-c257047ccc6d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.760778] env[61898]: DEBUG nova.compute.provider_tree [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.770155] env[61898]: DEBUG nova.network.neutron [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.984108] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c866ed4a-3883-4371-9c6a-1de17cfeb758 tempest-ServerActionsTestOtherA-1786879577 tempest-ServerActionsTestOtherA-1786879577-project-member] Lock "1ac6419f-9a21-4927-9c6e-01ba2dcaf7a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.003s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.263967] env[61898]: DEBUG nova.scheduler.client.report [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed for provider 
79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 653.274559] env[61898]: INFO nova.compute.manager [-] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Took 1.03 seconds to deallocate network for instance. [ 653.488797] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 653.772424] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.772942] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 653.776988] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.127s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.784893] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.931277] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.931816] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.021162] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.280241] env[61898]: DEBUG nova.compute.utils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 654.281803] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 654.284522] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 654.394134] env[61898]: DEBUG nova.policy [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02f8a8e77d6e44bc844b916e2dcb913c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e04d71b6dab84c6196cd869f22e956eb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 654.792149] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 654.804020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "aab10d8f-0d25-4351-a627-7222be63895e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.804020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.814494] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Successfully created port: bcddd00e-804c-44a4-96d3-61b1e2ef3fbb {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.905333] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a0a15b-cb67-4f30-987a-455cda34f6b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.914129] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e693313d-4c8e-41a8-b92f-5ce5bada812e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.947609] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8870193-2a91-4dd4-a3f5-3e35e28a979f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.952315] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0a67be-9c0e-4257-bd09-787a7d9d6097 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.966327] env[61898]: DEBUG nova.compute.provider_tree [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.468858] env[61898]: DEBUG nova.scheduler.client.report [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 655.809284] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 655.838666] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.838936] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.839480] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.839767] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.839877] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.840036] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.840248] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.840439] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.840666] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.840821] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.840933] env[61898]: DEBUG nova.virt.hardware [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.841812] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64083789-bc07-4bec-a97b-66ac7b529b2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.855336] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbaaa288-837f-4845-a306-c38160b2c5e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.977983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.201s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.978659] env[61898]: ERROR nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. 
[ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Traceback (most recent call last): [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.driver.spawn(context, instance, image_meta, [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] vm_ref = self.build_virtual_machine(instance, [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 655.978659] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] for vif in network_info: [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return self._sync_wrapper(fn, *args, **kwargs) [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.wait() [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self[:] = self._gt.wait() [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return self._exit_event.wait() [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] current.throw(*self._exc) [ 655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
655.978989] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] result = function(*args, **kwargs) [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] return func(*args, **kwargs) [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise e [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] nwinfo = self.network_api.allocate_for_instance( [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] created_port_ids = self._update_ports_for_instance( [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] with excutils.save_and_reraise_exception(): [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] self.force_reraise() [ 655.979400] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise self.value [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] updated_port = self._update_port( [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] _ensure_no_port_binding_failure(port) [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] raise exception.PortBindingFailed(port_id=port['id']) [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] nova.exception.PortBindingFailed: Binding failed for 
port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. [ 655.979762] env[61898]: ERROR nova.compute.manager [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] [ 655.979762] env[61898]: DEBUG nova.compute.utils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 655.980877] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.079s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.982403] env[61898]: INFO nova.compute.claims [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.985069] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Build of instance 1f7b6f74-24c1-4db1-9f70-350f307a07b4 was re-scheduled: Binding failed for port c14be22f-8866-4421-8d39-1c5806e94592, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 655.985542] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 655.985876] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquiring lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.985938] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Acquired lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.986077] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.443305] env[61898]: DEBUG nova.compute.manager [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Received event network-changed-bcddd00e-804c-44a4-96d3-61b1e2ef3fbb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 656.443496] env[61898]: DEBUG nova.compute.manager [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Refreshing instance network info cache due to event network-changed-bcddd00e-804c-44a4-96d3-61b1e2ef3fbb. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 656.443708] env[61898]: DEBUG oslo_concurrency.lockutils [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] Acquiring lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.443847] env[61898]: DEBUG oslo_concurrency.lockutils [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] Acquired lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.444028] env[61898]: DEBUG nova.network.neutron [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Refreshing network info cache for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.517024] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 656.646957] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.878982] env[61898]: ERROR nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. 
[ 656.878982] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 656.878982] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 656.878982] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 656.878982] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.878982] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.878982] env[61898]: ERROR nova.compute.manager raise self.value [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 656.878982] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 656.878982] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.878982] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 656.880184] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.880184] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 656.880184] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. 
[ 656.880184] env[61898]: ERROR nova.compute.manager [ 656.880184] env[61898]: Traceback (most recent call last): [ 656.880184] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 656.880184] env[61898]: listener.cb(fileno) [ 656.880184] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.880184] env[61898]: result = function(*args, **kwargs) [ 656.880184] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 656.880184] env[61898]: return func(*args, **kwargs) [ 656.880184] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 656.880184] env[61898]: raise e [ 656.880184] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 656.880184] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 656.880184] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 656.880184] env[61898]: created_port_ids = self._update_ports_for_instance( [ 656.880184] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 656.880184] env[61898]: with excutils.save_and_reraise_exception(): [ 656.880184] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.880184] env[61898]: self.force_reraise() [ 656.880184] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.880184] env[61898]: raise self.value [ 656.880184] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 656.880184] env[61898]: updated_port = self._update_port( [ 656.880184] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.880184] env[61898]: _ensure_no_port_binding_failure(port) [ 656.880184] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.880184] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 656.881483] env[61898]: nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. [ 656.881483] env[61898]: Removing descriptor: 20 [ 656.881483] env[61898]: ERROR nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. 
[ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Traceback (most recent call last): [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] yield resources [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.driver.spawn(context, instance, image_meta, [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 656.881483] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] vm_ref = self.build_virtual_machine(instance, [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] vif_infos = vmwarevif.get_vif_info(self._session, [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] for vif in network_info: [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self._sync_wrapper(fn, *args, **kwargs) [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.wait() [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self[:] = self._gt.wait() [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self._exit_event.wait() [ 656.881969] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 656.882448] env[61898]: ERROR 
nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] result = hub.switch() [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self.greenlet.switch() [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] result = function(*args, **kwargs) [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return func(*args, **kwargs) [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise e [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] nwinfo = self.network_api.allocate_for_instance( [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 656.882448] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] created_port_ids = self._update_ports_for_instance( [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] with excutils.save_and_reraise_exception(): [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.force_reraise() [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise self.value [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] updated_port = self._update_port( [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 656.883171] 
env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] _ensure_no_port_binding_failure(port) [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 656.883171] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise exception.PortBindingFailed(port_id=port['id']) [ 656.883606] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. [ 656.883606] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] [ 656.883606] env[61898]: INFO nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Terminating instance [ 656.976410] env[61898]: DEBUG nova.network.neutron [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.150055] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Releasing lock "refresh_cache-1f7b6f74-24c1-4db1-9f70-350f307a07b4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.150055] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 657.150055] env[61898]: DEBUG nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 657.150055] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 657.175491] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.191842] env[61898]: DEBUG nova.network.neutron [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.396017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.482325] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedfc97f-c226-49d2-a320-9f3bd75f93ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.491673] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad462a32-328e-4033-9398-e343dfd81e92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.530793] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9bbf24-8a82-45d9-a56f-5f0872df97d3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.542627] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5727a49d-ba8a-425a-af4c-60491c2a83fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.564303] env[61898]: DEBUG nova.compute.provider_tree [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.681465] env[61898]: DEBUG nova.network.neutron [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.694754] env[61898]: DEBUG oslo_concurrency.lockutils [req-cb5d4be5-cb49-451d-84ba-28f3f0aa2206 req-6a0893a1-1b00-4243-b1d4-288a55d95b96 service nova] Releasing lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.695211] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquired lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.695484] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 
tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.069183] env[61898]: DEBUG nova.scheduler.client.report [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 658.188496] env[61898]: INFO nova.compute.manager [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] [instance: 1f7b6f74-24c1-4db1-9f70-350f307a07b4] Took 1.04 seconds to deallocate network for instance. [ 658.225040] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.345238] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.577824] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.577824] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 658.584941] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.536s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 658.586186] env[61898]: INFO nova.compute.claims [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 658.601278] env[61898]: DEBUG nova.compute.manager [req-35f2a367-43d9-47bd-8df9-f7d4d06a2dfe req-72f7c4ff-9b8d-4605-b52e-89aa95a04702 service nova] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Received event network-vif-deleted-bcddd00e-804c-44a4-96d3-61b1e2ef3fbb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 658.848201] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Releasing lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.848641] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 658.849095] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 658.849161] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87ef83d2-c489-4d1c-8308-23f3bfb57c7e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.862876] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16e09aa-d93c-4d63-9765-261dd2f4919e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.888299] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 74a2e109-244c-4349-a0b7-0db9e9d4868e could not be found. 
[ 658.888299] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 658.888299] env[61898]: INFO nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 658.888299] env[61898]: DEBUG oslo.service.loopingcall [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.888299] env[61898]: DEBUG nova.compute.manager [-] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 658.888299] env[61898]: DEBUG nova.network.neutron [-] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 658.904336] env[61898]: DEBUG nova.network.neutron [-] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.064173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.064173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.091851] env[61898]: DEBUG nova.compute.utils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 659.097018] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 659.097018] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 659.139982] env[61898]: DEBUG nova.policy [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e56a517c1aca416d810368ad50a1719f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '281dcbf7480543588e645530376457ad', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 659.236476] env[61898]: INFO nova.scheduler.client.report [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Deleted allocations for instance 1f7b6f74-24c1-4db1-9f70-350f307a07b4 [ 659.407882] env[61898]: DEBUG nova.network.neutron [-] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.441531] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Successfully created port: 3f69a569-f8a4-4ab1-81f5-2e4dd9623732 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.599101] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 659.749419] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3c945972-aab4-4f20-ab67-7997e01228e1 tempest-MigrationsAdminTest-1809000760 tempest-MigrationsAdminTest-1809000760-project-member] Lock "1f7b6f74-24c1-4db1-9f70-350f307a07b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.026s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.912492] env[61898]: INFO nova.compute.manager [-] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Took 1.02 seconds to deallocate network for instance. 
[ 659.916340] env[61898]: DEBUG nova.compute.claims [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 659.916340] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.009088] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511f6b6a-6646-4609-be22-2e2051f50d12 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.015291] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c146279-af47-4831-b802-9d69f03bf8cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.046997] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-788fbd67-2e7b-4227-81de-82e2002728a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.059015] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2470d79-2af5-4006-8bb9-5f05471e6b6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.071292] env[61898]: DEBUG nova.compute.provider_tree [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.112418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "320577e5-f197-4f66-a94f-9b9ba2479325" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.112748] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.257309] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 660.414041] env[61898]: ERROR nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. [ 660.414041] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 660.414041] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.414041] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.414041] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.414041] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.414041] env[61898]: ERROR nova.compute.manager raise self.value [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.414041] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 660.414041] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.414041] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 660.414564] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.414564] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 660.414564] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. 
[ 660.414564] env[61898]: ERROR nova.compute.manager [ 660.414564] env[61898]: Traceback (most recent call last): [ 660.414564] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 660.414564] env[61898]: listener.cb(fileno) [ 660.414564] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.414564] env[61898]: result = function(*args, **kwargs) [ 660.414564] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.414564] env[61898]: return func(*args, **kwargs) [ 660.414564] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 660.414564] env[61898]: raise e [ 660.414564] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 660.414564] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 660.414564] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.414564] env[61898]: created_port_ids = self._update_ports_for_instance( [ 660.414564] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.414564] env[61898]: with excutils.save_and_reraise_exception(): [ 660.414564] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.414564] env[61898]: self.force_reraise() [ 660.414564] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.414564] env[61898]: raise self.value [ 660.414564] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.414564] env[61898]: updated_port = self._update_port( [ 660.414564] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.414564] env[61898]: _ensure_no_port_binding_failure(port) [ 660.414564] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.414564] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 660.415524] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. [ 660.415524] env[61898]: Removing descriptor: 20 [ 660.574755] env[61898]: DEBUG nova.scheduler.client.report [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 660.611386] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 660.625546] env[61898]: DEBUG nova.compute.manager [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Received event network-changed-3f69a569-f8a4-4ab1-81f5-2e4dd9623732 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 660.625764] env[61898]: DEBUG nova.compute.manager [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Refreshing instance network info cache due to event network-changed-3f69a569-f8a4-4ab1-81f5-2e4dd9623732. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 660.625993] env[61898]: DEBUG oslo_concurrency.lockutils [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] Acquiring lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.626544] env[61898]: DEBUG oslo_concurrency.lockutils [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] Acquired lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.626727] env[61898]: DEBUG nova.network.neutron [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Refreshing network info cache for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.638494] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.638771] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.638978] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.639184] 
env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.639331] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.639478] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.639683] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.639839] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 660.640007] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.640177] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.640379] env[61898]: DEBUG nova.virt.hardware [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.641533] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce14ff8-f455-49d3-8802-fae19718b64c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.650440] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486ed0a7-31e2-47dd-8693-e4791254d602 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.665062] env[61898]: ERROR nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance failed to spawn: 
nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Traceback (most recent call last): [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] yield resources [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.driver.spawn(context, instance, image_meta, [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] vm_ref = self.build_virtual_machine(instance, [ 660.665062] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] vif_infos = vmwarevif.get_vif_info(self._session, [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] for vif in network_info: [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return self._sync_wrapper(fn, *args, **kwargs) [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.wait() [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self[:] = self._gt.wait() [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return self._exit_event.wait() [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 
2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 660.665379] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] current.throw(*self._exc) [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] result = function(*args, **kwargs) [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return func(*args, **kwargs) [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise e [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] nwinfo = self.network_api.allocate_for_instance( [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] created_port_ids = self._update_ports_for_instance( [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] with excutils.save_and_reraise_exception(): [ 660.665718] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.force_reraise() [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise self.value [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] updated_port = self._update_port( [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] _ensure_no_port_binding_failure(port) [ 660.666046] env[61898]: ERROR 
nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise exception.PortBindingFailed(port_id=port['id']) [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. [ 660.666046] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] [ 660.666046] env[61898]: INFO nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Terminating instance [ 660.786173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.080919] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.081184] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 661.084968] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.514s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.089135] env[61898]: INFO nova.compute.claims [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.150213] env[61898]: DEBUG nova.network.neutron [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 661.171896] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.286215] env[61898]: DEBUG nova.network.neutron [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.595362] env[61898]: DEBUG nova.compute.utils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 661.596783] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 661.596945] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.652357] env[61898]: DEBUG nova.policy [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fd88df017c67404b99bd25190582f1d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67dada4d35ad481c828edecb23efdf77', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 661.788994] env[61898]: DEBUG oslo_concurrency.lockutils [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] Releasing lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.789654] env[61898]: DEBUG nova.compute.manager [req-75335b5a-cb27-4337-999b-22041a2b08b4 req-bc1012cc-a32a-4d31-be05-53c59729030e service nova] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Received event network-vif-deleted-3f69a569-f8a4-4ab1-81f5-2e4dd9623732 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 661.789735] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquired lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" 
{{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.789875] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.888024] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Successfully created port: 515d2f3e-160a-4cef-a9c3-7c1902058771 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.104734] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 662.314570] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.433772] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.533510] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c260aefb-7804-4cf3-9cfe-200b04d454ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.541779] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab49e60-a93c-4f2f-8785-157032178b73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.576929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7f64ee-8683-4926-8389-2439dfacdaad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.585471] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a32b0ca-db2c-4703-b062-4159e507bf6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.599712] env[61898]: DEBUG nova.compute.provider_tree [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.649313] env[61898]: DEBUG nova.compute.manager 
[req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Received event network-changed-515d2f3e-160a-4cef-a9c3-7c1902058771 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 662.649504] env[61898]: DEBUG nova.compute.manager [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Refreshing instance network info cache due to event network-changed-515d2f3e-160a-4cef-a9c3-7c1902058771. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 662.650150] env[61898]: DEBUG oslo_concurrency.lockutils [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] Acquiring lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.650240] env[61898]: DEBUG oslo_concurrency.lockutils [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] Acquired lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.650443] env[61898]: DEBUG nova.network.neutron [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Refreshing network info cache for port 515d2f3e-160a-4cef-a9c3-7c1902058771 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.782910] env[61898]: ERROR nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. 
[ 662.782910] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 662.782910] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.782910] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.782910] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.782910] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.782910] env[61898]: ERROR nova.compute.manager raise self.value [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.782910] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 662.782910] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.782910] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 662.783346] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.783346] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 662.783346] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. 
[ 662.783346] env[61898]: ERROR nova.compute.manager [ 662.783346] env[61898]: Traceback (most recent call last): [ 662.783346] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 662.783346] env[61898]: listener.cb(fileno) [ 662.783346] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 662.783346] env[61898]: result = function(*args, **kwargs) [ 662.783346] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 662.783346] env[61898]: return func(*args, **kwargs) [ 662.783346] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 662.783346] env[61898]: raise e [ 662.783346] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 662.783346] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 662.783346] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 662.783346] env[61898]: created_port_ids = self._update_ports_for_instance( [ 662.783346] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 662.783346] env[61898]: with excutils.save_and_reraise_exception(): [ 662.783346] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.783346] env[61898]: self.force_reraise() [ 662.783346] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.783346] env[61898]: raise self.value [ 662.783346] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 662.783346] env[61898]: updated_port = self._update_port( [ 662.783346] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.783346] env[61898]: _ensure_no_port_binding_failure(port) [ 662.783346] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 662.783346] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 662.784021] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. [ 662.784021] env[61898]: Removing descriptor: 20 [ 662.936970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Releasing lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.937436] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 662.937637] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 662.937945] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d9b29bc-b696-4185-8939-700abcf5e828 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.949167] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e56d67a-5c8a-4fe9-a149-c90ff34ec372 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.970670] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2cfdb95a-8c00-4528-a4bc-55f4ced67a89 could not be found. [ 662.970879] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 662.971072] env[61898]: INFO nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Took 0.03 seconds to destroy the instance on the hypervisor. [ 662.971348] env[61898]: DEBUG oslo.service.loopingcall [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.971591] env[61898]: DEBUG nova.compute.manager [-] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 662.971688] env[61898]: DEBUG nova.network.neutron [-] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 662.987617] env[61898]: DEBUG nova.network.neutron [-] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.105934] env[61898]: DEBUG nova.scheduler.client.report [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 663.113644] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 663.138583] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 663.138834] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.139068] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.139182] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.139324] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.139469] 
env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.139667] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 663.139822] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 663.139982] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.140158] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.140348] env[61898]: DEBUG nova.virt.hardware [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.141421] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18cb67-79c8-43fd-8208-23d0c4a8619d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.149324] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ff9931-87f2-4893-95f3-5a42079dcc6b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.165037] env[61898]: ERROR nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. 
[ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Traceback (most recent call last): [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] yield resources [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.driver.spawn(context, instance, image_meta, [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] vm_ref = self.build_virtual_machine(instance, [ 663.165037] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] vif_infos = vmwarevif.get_vif_info(self._session, [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] for vif in network_info: [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return self._sync_wrapper(fn, *args, **kwargs) [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.wait() [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self[:] = self._gt.wait() [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return self._exit_event.wait() [ 663.165433] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 663.165433] env[61898]: ERROR 
nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] current.throw(*self._exc) [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] result = function(*args, **kwargs) [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return func(*args, **kwargs) [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise e [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] nwinfo = self.network_api.allocate_for_instance( [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] created_port_ids = self._update_ports_for_instance( [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] with excutils.save_and_reraise_exception(): [ 663.165797] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.force_reraise() [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise self.value [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] updated_port = self._update_port( [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] _ensure_no_port_binding_failure(port) [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise exception.PortBindingFailed(port_id=port['id']) [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. [ 663.166181] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] [ 663.166181] env[61898]: INFO nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Terminating instance [ 663.172678] env[61898]: DEBUG nova.network.neutron [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 663.285439] env[61898]: DEBUG nova.network.neutron [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.490350] env[61898]: DEBUG nova.network.neutron [-] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.610899] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.526s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.611699] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 663.614526] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.935s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.669465] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquiring lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.789414] env[61898]: DEBUG oslo_concurrency.lockutils [req-d1ba6d62-efd2-45a3-811c-5ad5f176af92 req-b3075ed7-995d-44d0-b2df-3cd964643756 service nova] Releasing lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.789814] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquired lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.790007] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 663.992499] env[61898]: INFO nova.compute.manager [-] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Took 1.02 seconds to deallocate network for instance. [ 663.995041] env[61898]: DEBUG nova.compute.claims [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 663.995227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.122932] env[61898]: DEBUG nova.compute.utils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.122932] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 664.122932] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 664.176299] env[61898]: DEBUG nova.policy [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cd97dceb35c490eb12e255a18c001b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f673066e8af4088bef76384e62cadd4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 664.335078] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.531148] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25b261f-eff2-47a7-9dbc-fd929cce0103 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.544568] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fce293-127f-472e-b194-6cff3faa6a78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.588030] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.589135] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b69de9-1243-460b-8edd-6ffcd7be9664 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.598532] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74d5acb-cf33-4841-b21f-a325b7957495 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.618123] env[61898]: DEBUG nova.compute.provider_tree [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.627864] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 
tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 664.719118] env[61898]: DEBUG nova.compute.manager [req-e44c5f65-14db-4dd3-8c0b-70d9ba9e22b7 req-132291ae-3ce1-41e0-84e4-97d06b976539 service nova] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Received event network-vif-deleted-515d2f3e-160a-4cef-a9c3-7c1902058771 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 664.723542] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Successfully created port: f19c6e84-b5a3-4f3f-b5de-27147c797d39 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.093823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Releasing lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.094297] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 665.094533] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.094853] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d43503a-33b3-4652-94ea-3911fc7b4e6d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.104342] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38f5cde-d6de-49a0-8d70-4610c000fc25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.121618] env[61898]: DEBUG nova.scheduler.client.report [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 665.129340] env[61898]: WARNING nova.virt.vmwareapi.vmops [None 
req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6fc82922-9142-475b-99a6-bbc5ee43b30b could not be found. [ 665.129618] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 665.129873] env[61898]: INFO nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 665.130164] env[61898]: DEBUG oslo.service.loopingcall [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 665.130681] env[61898]: DEBUG nova.compute.manager [-] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 665.130899] env[61898]: DEBUG nova.network.neutron [-] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 665.154430] env[61898]: DEBUG nova.network.neutron [-] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.608426] env[61898]: ERROR nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. 
[ 665.608426] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 665.608426] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.608426] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.608426] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.608426] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.608426] env[61898]: ERROR nova.compute.manager raise self.value [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.608426] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 665.608426] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.608426] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 665.609145] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.609145] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 665.609145] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. 
[ 665.609145] env[61898]: ERROR nova.compute.manager [ 665.609145] env[61898]: Traceback (most recent call last): [ 665.609145] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 665.609145] env[61898]: listener.cb(fileno) [ 665.609145] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.609145] env[61898]: result = function(*args, **kwargs) [ 665.609145] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.609145] env[61898]: return func(*args, **kwargs) [ 665.609145] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 665.609145] env[61898]: raise e [ 665.609145] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 665.609145] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 665.609145] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.609145] env[61898]: created_port_ids = self._update_ports_for_instance( [ 665.609145] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.609145] env[61898]: with excutils.save_and_reraise_exception(): [ 665.609145] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.609145] env[61898]: self.force_reraise() [ 665.609145] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.609145] env[61898]: raise self.value [ 665.609145] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.609145] env[61898]: updated_port = self._update_port( [ 665.609145] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.609145] env[61898]: _ensure_no_port_binding_failure(port) [ 665.609145] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.609145] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 665.610088] env[61898]: nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. [ 665.610088] env[61898]: Removing descriptor: 20 [ 665.631763] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.017s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.632401] env[61898]: ERROR nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. 
[ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Traceback (most recent call last): [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.driver.spawn(context, instance, image_meta, [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] vm_ref = self.build_virtual_machine(instance, [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.632401] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] for vif in network_info: [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return self._sync_wrapper(fn, *args, **kwargs) [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.wait() [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self[:] = self._gt.wait() [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return self._exit_event.wait() [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] current.throw(*self._exc) [ 665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
665.632818] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] result = function(*args, **kwargs) [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] return func(*args, **kwargs) [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise e [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] nwinfo = self.network_api.allocate_for_instance( [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] created_port_ids = self._update_ports_for_instance( [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] with excutils.save_and_reraise_exception(): [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] self.force_reraise() [ 665.633153] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise self.value [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] updated_port = self._update_port( [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] _ensure_no_port_binding_failure(port) [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] raise exception.PortBindingFailed(port_id=port['id']) [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] nova.exception.PortBindingFailed: Binding failed for 
port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. [ 665.633468] env[61898]: ERROR nova.compute.manager [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] [ 665.633468] env[61898]: DEBUG nova.compute.utils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 665.634412] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.333s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.638123] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 665.641949] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Build of instance 082fe687-5038-4c31-9b27-f8a5c548cdc1 was re-scheduled: Binding failed for port 3d709c30-22ff-466c-9489-a58fc2d8f251, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 665.643229] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 665.643229] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.643229] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquired lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.643229] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 665.657031] env[61898]: DEBUG nova.network.neutron [-] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.667745] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.667978] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.668142] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Image limits 0:0:0 {{(pid=61898) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.668325] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.668468] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.668607] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.668801] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.668953] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 665.669125] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.669283] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.669448] env[61898]: DEBUG nova.virt.hardware [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.670829] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a7ba12-cf0a-4d3d-bad8-833d693bcacb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.680899] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50989b6-15fb-4012-82e6-b3007cbbc1ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.695817] env[61898]: ERROR nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 
tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Traceback (most recent call last): [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] yield resources [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.driver.spawn(context, instance, image_meta, [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] vm_ref = self.build_virtual_machine(instance, [ 665.695817] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] vif_infos = vmwarevif.get_vif_info(self._session, [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] for vif in network_info: [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return self._sync_wrapper(fn, *args, **kwargs) [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.wait() [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self[:] = self._gt.wait() [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 665.696146] env[61898]: ERROR 
nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return self._exit_event.wait() [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 665.696146] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] current.throw(*self._exc) [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] result = function(*args, **kwargs) [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return func(*args, **kwargs) [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise e [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] nwinfo = self.network_api.allocate_for_instance( [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] created_port_ids = self._update_ports_for_instance( [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] with excutils.save_and_reraise_exception(): [ 665.696489] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.force_reraise() [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise self.value [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] updated_port = self._update_port( [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 665.697137] 
env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] _ensure_no_port_binding_failure(port) [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise exception.PortBindingFailed(port_id=port['id']) [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. [ 665.697137] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] [ 665.697137] env[61898]: INFO nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Terminating instance [ 666.158454] env[61898]: INFO nova.compute.manager [-] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Took 1.03 seconds to deallocate network for instance. [ 666.161141] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.163065] env[61898]: DEBUG nova.compute.claims [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 666.163197] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.200147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquiring lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.200364] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquired lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.200573] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 666.240275] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.670656] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 23a0d825-3132-44d5-8b73-a06a0c0e7b1a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 666.724193] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.743839] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Releasing lock "refresh_cache-082fe687-5038-4c31-9b27-f8a5c548cdc1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.744117] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 666.744304] env[61898]: DEBUG nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 666.744465] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.752935] env[61898]: DEBUG nova.compute.manager [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Received event network-changed-f19c6e84-b5a3-4f3f-b5de-27147c797d39 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 666.752935] env[61898]: DEBUG nova.compute.manager [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Refreshing instance network info cache due to event network-changed-f19c6e84-b5a3-4f3f-b5de-27147c797d39. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 666.752935] env[61898]: DEBUG oslo_concurrency.lockutils [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] Acquiring lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.772286] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.835135] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.174063] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 082fe687-5038-4c31-9b27-f8a5c548cdc1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.174229] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance c6e77597-5a5d-4b86-8588-7056828025cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 667.174280] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 74a2e109-244c-4349-a0b7-0db9e9d4868e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 667.174357] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 2cfdb95a-8c00-4528-a4bc-55f4ced67a89 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 667.174473] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 6fc82922-9142-475b-99a6-bbc5ee43b30b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 667.174585] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 31239011-3cd9-4fea-a99d-26d09884497b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 667.275227] env[61898]: DEBUG nova.network.neutron [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.338244] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Releasing lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.338443] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 667.338639] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.338954] env[61898]: DEBUG oslo_concurrency.lockutils [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] Acquired lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.339140] env[61898]: DEBUG nova.network.neutron [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Refreshing network info cache for port f19c6e84-b5a3-4f3f-b5de-27147c797d39 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 667.340162] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8176f15-0795-4405-bddf-246f1cd32d0d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.352238] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce0c1ac-5308-4e47-b5f6-b455727872dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.375860] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance 
does not exist on backend: nova.exception.InstanceNotFound: Instance 31239011-3cd9-4fea-a99d-26d09884497b could not be found. [ 667.376021] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 667.376210] env[61898]: INFO nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 667.376445] env[61898]: DEBUG oslo.service.loopingcall [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.376713] env[61898]: DEBUG nova.compute.manager [-] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 667.376807] env[61898]: DEBUG nova.network.neutron [-] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 667.393617] env[61898]: DEBUG nova.network.neutron [-] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.677896] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 8ab18b24-91d4-4718-8f1a-d82f4226ba2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 667.777954] env[61898]: INFO nova.compute.manager [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 082fe687-5038-4c31-9b27-f8a5c548cdc1] Took 1.03 seconds to deallocate network for instance. [ 667.860081] env[61898]: DEBUG nova.network.neutron [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.896686] env[61898]: DEBUG nova.network.neutron [-] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.935764] env[61898]: DEBUG nova.network.neutron [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.182056] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 81fd9ccc-a267-498d-93d4-8adf894ee8d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.401637] env[61898]: INFO nova.compute.manager [-] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Took 1.02 seconds to deallocate network for instance. [ 668.404044] env[61898]: DEBUG nova.compute.claims [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 668.404272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.438097] env[61898]: DEBUG oslo_concurrency.lockutils [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] Releasing lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.438389] env[61898]: DEBUG nova.compute.manager [req-3065ceb5-6cb1-494b-a1da-6672be8cf743 req-157fe12d-ded4-4c42-98e8-e5f414afee61 service nova] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Received event network-vif-deleted-f19c6e84-b5a3-4f3f-b5de-27147c797d39 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 668.684889] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1f70b6e1-b534-40a1-b262-e0a5ce3e425e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 668.805964] env[61898]: INFO nova.scheduler.client.report [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Deleted allocations for instance 082fe687-5038-4c31-9b27-f8a5c548cdc1 [ 669.189126] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.316176] env[61898]: DEBUG oslo_concurrency.lockutils [None req-80c3ced5-6300-4670-a358-7647626ceba2 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "082fe687-5038-4c31-9b27-f8a5c548cdc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.002s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.693097] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4a6adf12-7106-46ce-abb0-fe8c5c212905 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 669.818952] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 670.195605] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 8ac73bda-db02-4427-9730-003561d078ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 670.340731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.699330] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 10e3f3dd-165b-4049-8c1f-f561c91717c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.202811] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 03ba4dad-5c58-4582-a36e-95de69b37474 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 671.705611] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 45138019-b69e-459b-99cf-47a47aa58e40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.209160] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 672.712195] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 2887126b-6db5-4578-a063-552e774542cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.215854] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 9e6a3749-1974-4818-9cc6-76367d41b7e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 673.719640] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 29eadea9-fa85-4f51-97d0-a941e1658094 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.222356] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance a0580308-d25b-47cb-9c1c-adb763be7925 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 674.481028] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.481258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.728889] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1fb4535d-47d8-45c5-b6d6-d05e57237b98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.233392] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4c744673-0d9b-44ef-938f-372b101a2053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 675.735734] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 34338563-05d4-477b-8480-6ef4cbf28e72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.239811] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1aa03975-f18f-4e64-836e-e991b73ee9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.743043] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance aab10d8f-0d25-4351-a627-7222be63895e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.245446] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 45b8dc91-b577-4548-bf3a-32c7c936c616 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.748184] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 320577e5-f197-4f66-a94f-9b9ba2479325 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 677.748569] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 677.748629] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 678.034355] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdc1837-bfe6-4602-b2fe-79d4ab625373 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.042021] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecffa64a-d127-486a-ac74-f555593b3b7e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.072220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d9b68a-480c-42fc-ae55-7c51709fd045 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.079723] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c204ef-f1f2-435a-ae5c-b536f527f85a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.094109] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.596531] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 679.101804] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 679.102095] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.468s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.102294] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.724s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.915902] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ccd09fb-2f93-4714-a6bc-6627cb29b973 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.923575] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a493e5d-d560-4f6a-bba9-22fca3ea8137 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.953770] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6235641-8b58-484f-9209-33d64f8203ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.961289] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b541994-d958-4ec3-a658-0941622f4537 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.974220] env[61898]: DEBUG nova.compute.provider_tree [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.478136] env[61898]: DEBUG nova.scheduler.client.report [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 680.983289] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.983951] env[61898]: ERROR nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Traceback (most recent call last): [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.driver.spawn(context, instance, image_meta, [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] vm_ref = self.build_virtual_machine(instance, [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] vif_infos = vmwarevif.get_vif_info(self._session, [ 680.983951] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] for vif in network_info: [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self._sync_wrapper(fn, *args, **kwargs) [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.wait() [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: 
c6e77597-5a5d-4b86-8588-7056828025cf] self[:] = self._gt.wait() [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self._exit_event.wait() [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] result = hub.switch() [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 680.984447] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return self.greenlet.switch() [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] result = function(*args, **kwargs) [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] return func(*args, **kwargs) [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise e [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] nwinfo = self.network_api.allocate_for_instance( [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] created_port_ids = self._update_ports_for_instance( [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] with excutils.save_and_reraise_exception(): [ 680.984817] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] self.force_reraise() [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise self.value [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] updated_port = self._update_port( [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] _ensure_no_port_binding_failure(port) [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] raise exception.PortBindingFailed(port_id=port['id']) [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] nova.exception.PortBindingFailed: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. [ 680.985198] env[61898]: ERROR nova.compute.manager [instance: c6e77597-5a5d-4b86-8588-7056828025cf] [ 680.985525] env[61898]: DEBUG nova.compute.utils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 680.986064] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.074s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.987702] env[61898]: INFO nova.compute.claims [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 680.990452] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Build of instance c6e77597-5a5d-4b86-8588-7056828025cf was re-scheduled: Binding failed for port 74cda50e-d23e-40f6-8e06-cbff437709e2, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 680.990912] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 680.991155] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.991303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.991458] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 681.517732] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.605224] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.109185] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Releasing lock "refresh_cache-c6e77597-5a5d-4b86-8588-7056828025cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.109415] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 682.109588] env[61898]: DEBUG nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 682.109751] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 682.124546] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.288032] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f17a9f-f5b6-4377-8c43-01b19df2a76e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.295715] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735bfe07-8046-44a2-b147-2cca0ebff52f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.325368] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96240e75-646c-41c7-a2b6-57fa115b4a17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.332622] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750f1796-523a-4dd8-b1af-a4fd57f26c37 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.346168] env[61898]: DEBUG nova.compute.provider_tree [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.628731] env[61898]: DEBUG nova.network.neutron [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.848751] env[61898]: DEBUG nova.scheduler.client.report [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 683.134632] env[61898]: INFO nova.compute.manager [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: c6e77597-5a5d-4b86-8588-7056828025cf] Took 1.02 seconds to deallocate network for instance. [ 683.354228] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.355039] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 683.357295] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 34.258s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.357477] env[61898]: DEBUG nova.objects.instance [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 683.866932] env[61898]: DEBUG nova.compute.utils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 683.867277] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 683.867459] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 683.909418] env[61898]: DEBUG nova.policy [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc9ca74b28ed466dac6b2510cfc0403c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a14f54f60a5e4929bf7a64d8c2fcd6aa', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 684.154313] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Successfully created port: 94a1b793-fff2-4121-98c2-bb91979f77f2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 684.166340] env[61898]: INFO nova.scheduler.client.report [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Deleted allocations for instance c6e77597-5a5d-4b86-8588-7056828025cf [ 684.369088] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fafad728-2e0f-45db-aa0e-ec4f66052664 tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.370243] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.188s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.371748] env[61898]: INFO nova.compute.claims [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.374581] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 684.678175] env[61898]: DEBUG oslo_concurrency.lockutils [None req-98723b94-3ad1-4c6f-bca5-a5b62f000fd8 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "c6e77597-5a5d-4b86-8588-7056828025cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.573s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.898144] env[61898]: DEBUG nova.compute.manager [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Received event network-changed-94a1b793-fff2-4121-98c2-bb91979f77f2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 684.898144] env[61898]: DEBUG nova.compute.manager [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Refreshing instance network info cache due to event network-changed-94a1b793-fff2-4121-98c2-bb91979f77f2. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 684.898144] env[61898]: DEBUG oslo_concurrency.lockutils [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] Acquiring lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 684.898144] env[61898]: DEBUG oslo_concurrency.lockutils [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] Acquired lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.898144] env[61898]: DEBUG nova.network.neutron [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Refreshing network info cache for port 94a1b793-fff2-4121-98c2-bb91979f77f2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.996859] env[61898]: ERROR nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. 
[ 684.996859] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 684.996859] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.996859] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.996859] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.996859] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.996859] env[61898]: ERROR nova.compute.manager raise self.value [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.996859] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 684.996859] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.996859] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 684.997374] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.997374] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 684.997374] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. 
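Editor's note: the failed builds in this log all terminate at the same check. nova.network.neutron._update_port calls _ensure_no_port_binding_failure(port), which raises nova.exception.PortBindingFailed once Neutron reports the binding as failed, and the exception then propagates out of the network-allocation greenthread. The snippet below is a minimal, self-contained sketch of that check for reference only, not the Nova source: the PortBindingFailed stand-in class and the 'binding:vif_type' == 'binding_failed' test are assumptions inferred from the messages in these tracebacks.

    # Sketch of the port-binding check exercised by the tracebacks above.
    # Assumption: Neutron flags a failed binding by returning the port with
    # 'binding:vif_type' set to 'binding_failed'; PortBindingFailed here is a
    # stand-in for nova.exception.PortBindingFailed, not the real class.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron reported the binding as failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # A port dict shaped like the one in the ERROR above (id taken from the log).
        failed_port = {'id': '94a1b793-fff2-4121-98c2-bb91979f77f2',
                       'binding:vif_type': VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the "Binding failed for port ..." lines above

The records around this point show both halves of the pattern: the ERROR above comes from _allocate_network_async running in the allocation greenthread, the bare traceback just below (ending with "Removing descriptor: 20") is the same exception surfacing again through eventlet's hub, and the earlier traceback for instance c6e77597 shows it resurfacing on the spawn side when the VMware driver iterates network_info and waits on that greenthread.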
[ 684.997374] env[61898]: ERROR nova.compute.manager [ 684.997374] env[61898]: Traceback (most recent call last): [ 684.997374] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 684.997374] env[61898]: listener.cb(fileno) [ 684.997374] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 684.997374] env[61898]: result = function(*args, **kwargs) [ 684.997374] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 684.997374] env[61898]: return func(*args, **kwargs) [ 684.997374] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 684.997374] env[61898]: raise e [ 684.997374] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 684.997374] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 684.997374] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 684.997374] env[61898]: created_port_ids = self._update_ports_for_instance( [ 684.997374] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 684.997374] env[61898]: with excutils.save_and_reraise_exception(): [ 684.997374] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 684.997374] env[61898]: self.force_reraise() [ 684.997374] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 684.997374] env[61898]: raise self.value [ 684.997374] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 684.997374] env[61898]: updated_port = self._update_port( [ 684.997374] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 684.997374] env[61898]: _ensure_no_port_binding_failure(port) [ 684.997374] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 684.997374] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 684.998137] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. [ 684.998137] env[61898]: Removing descriptor: 20 [ 685.181091] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 685.388366] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 685.417559] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 685.417799] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 685.417952] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.418146] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 685.418288] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.418431] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 685.418667] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 685.418843] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 
tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 685.419022] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 685.419189] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 685.419358] env[61898]: DEBUG nova.virt.hardware [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 685.420616] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5738cdd-9b06-49e7-9482-106db8d4129a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.424796] env[61898]: DEBUG nova.network.neutron [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.433960] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c060f409-d6d7-41b8-be80-072ab1e0746e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.452136] env[61898]: ERROR nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. 
[ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Traceback (most recent call last): [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] yield resources [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.driver.spawn(context, instance, image_meta, [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] vm_ref = self.build_virtual_machine(instance, [ 685.452136] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] for vif in network_info: [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return self._sync_wrapper(fn, *args, **kwargs) [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.wait() [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self[:] = self._gt.wait() [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return self._exit_event.wait() [ 685.452474] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 685.452474] env[61898]: ERROR 
nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] current.throw(*self._exc) [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] result = function(*args, **kwargs) [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return func(*args, **kwargs) [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise e [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] nwinfo = self.network_api.allocate_for_instance( [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] created_port_ids = self._update_ports_for_instance( [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] with excutils.save_and_reraise_exception(): [ 685.452852] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.force_reraise() [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise self.value [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] updated_port = self._update_port( [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] _ensure_no_port_binding_failure(port) [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise exception.PortBindingFailed(port_id=port['id']) [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. [ 685.453241] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] [ 685.453241] env[61898]: INFO nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Terminating instance [ 685.505306] env[61898]: DEBUG nova.network.neutron [req-95c64ba8-4bf3-427d-b743-323313e11ba0 req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.701090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.736318] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f021c31f-a995-455f-9889-c234b6fc966e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.745328] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-569ad4a9-79e0-4b1c-a4f1-b9955af891de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.779879] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba4dd2f-0789-4d81-8f58-b72eaba36706 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.787673] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bc8eb8-a671-4b8e-b1bf-e7d574f3d153 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.801295] env[61898]: DEBUG nova.compute.provider_tree [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.958971] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquiring lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.008817] env[61898]: DEBUG oslo_concurrency.lockutils [req-95c64ba8-4bf3-427d-b743-323313e11ba0 
req-24a19fdb-5edb-40ff-b0ba-ce63e07bf3e0 service nova] Releasing lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.011109] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquired lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.011109] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 686.305122] env[61898]: DEBUG nova.scheduler.client.report [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 686.530539] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 686.604338] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.810873] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.440s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.811967] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 686.814970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.029s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.814970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.817124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.795s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.818143] env[61898]: INFO nova.compute.claims [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.849962] env[61898]: INFO nova.scheduler.client.report [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Deleted allocations for instance 23a0d825-3132-44d5-8b73-a06a0c0e7b1a [ 686.990025] env[61898]: DEBUG nova.compute.manager [req-334b47f2-03f8-4bf4-bcfb-08532de50545 req-aa070646-9f0e-478c-89d5-406cc24aba80 service nova] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Received event network-vif-deleted-94a1b793-fff2-4121-98c2-bb91979f77f2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 687.107102] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Releasing lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.107590] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 687.107789] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.108103] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad4c8b5f-4483-4501-a160-6261769cc0b2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.117761] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f098ec-e909-4244-a6b6-874d82fa7eaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.140145] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ab18b24-91d4-4718-8f1a-d82f4226ba2a could not be found. [ 687.140347] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.140557] env[61898]: INFO nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 687.140788] env[61898]: DEBUG oslo.service.loopingcall [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 687.141014] env[61898]: DEBUG nova.compute.manager [-] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 687.141110] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.155618] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 687.322721] env[61898]: DEBUG nova.compute.utils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 687.326123] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 687.358667] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2e6e5b1b-f373-4584-b812-36069877f7ec tempest-ServerShowV254Test-1601862822 tempest-ServerShowV254Test-1601862822-project-member] Lock "23a0d825-3132-44d5-8b73-a06a0c0e7b1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.859s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.428487] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.429011] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.658052] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.827604] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 688.141291] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd66ac6a-7dcd-4aec-bdbb-0306c38337d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.149507] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ce478a-6051-4c6b-a62b-de8b115f5bfa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.177222] env[61898]: INFO nova.compute.manager [-] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Took 1.04 seconds to deallocate network for instance. 
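The destroy path in the preceding records tolerates a VM that never materialised on the backend: vmops logs InstanceNotFound as a warning, marks the instance destroyed, and lets network deallocation and the claim abort continue. A small self-contained sketch of that tolerant-destroy pattern follows; every name in it is an illustrative placeholder, not Nova's real API, and the backend lookup raising a not-found error is an assumption for the example.

```python
# Sketch of the tolerant destroy pattern visible in the records above: a VM that is
# already absent on the backend is logged as a warning and treated as destroyed, so
# cleanup (network deallocation, claim abort) still proceeds.
# All names below are illustrative placeholders, not Nova's real API.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    pass


class FakeBackend:
    """Stand-in for the hypervisor session; it knows no VMs at all."""

    def find_vm(self, uuid):
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    def delete_vm(self, vm_ref):
        pass  # never reached in this sketch


def destroy_instance(backend, uuid):
    try:
        vm_ref = backend.find_vm(uuid)
    except InstanceNotFound as exc:
        # Mirrors the WARNING in the log: missing on the backend == already destroyed.
        LOG.warning("Instance does not exist on backend: %s", exc)
        return
    backend.delete_vm(vm_ref)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(FakeBackend(), "8ab18b24-91d4-4718-8f1a-d82f4226ba2a")
    LOG.debug("Instance destroyed")  # network deallocation would continue from here
```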
[ 688.179547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487cef58-aaf0-48d5-a0ab-3890a115928e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.182273] env[61898]: DEBUG nova.compute.claims [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 688.182450] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.187618] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96321517-d70c-45c8-9b3f-f6b3f3b129ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.200243] env[61898]: DEBUG nova.compute.provider_tree [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.703744] env[61898]: DEBUG nova.scheduler.client.report [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 688.841745] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 688.862733] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 688.862976] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 688.863146] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.863328] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 688.863476] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.863619] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 688.863824] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 688.863975] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 688.864150] env[61898]: DEBUG nova.virt.hardware [None 
req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 688.864310] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 688.864478] env[61898]: DEBUG nova.virt.hardware [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.865337] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5903e21f-2440-4cbf-a0ff-ea5d1ded51c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.873501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0627e1f0-24a9-4737-89c0-2aa844cf8129 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.889266] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.893847] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Creating folder: Project (f609d87b47074f9b807100d20b97f6a2). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.894139] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f27b2717-d513-4f6a-ae40-64743b0b140c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.905194] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Created folder: Project (f609d87b47074f9b807100d20b97f6a2) in parent group-v267550. [ 688.905379] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Creating folder: Instances. Parent ref: group-v267576. 
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 688.908603] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb8571ed-30fa-4f7f-809f-c55cf25ae94d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.914195] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Created folder: Instances in parent group-v267576. [ 688.914195] env[61898]: DEBUG oslo.service.loopingcall [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 688.914324] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 688.914501] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f86454d3-d688-4fed-b757-a88d7de0da0d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.931111] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.931111] env[61898]: value = "task-1240441" [ 688.931111] env[61898]: _type = "Task" [ 688.931111] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.938368] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240441, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.209598] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.210339] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 689.214630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.299s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.353641] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 689.353641] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 689.444416] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240441, 'name': CreateVM_Task, 'duration_secs': 0.259154} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.444416] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.444670] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.444824] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.445174] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 689.445750] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1ff1c61-c415-4e82-9e0d-2e785f6b1168 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.450234] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 
tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 689.450234] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d27ad8-9975-b8b5-ecf2-7644ed637c29" [ 689.450234] env[61898]: _type = "Task" [ 689.450234] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.458921] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d27ad8-9975-b8b5-ecf2-7644ed637c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.720662] env[61898]: DEBUG nova.compute.utils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 689.725606] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 689.725606] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.772546] env[61898]: DEBUG nova.policy [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 689.962115] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d27ad8-9975-b8b5-ecf2-7644ed637c29, 'name': SearchDatastore_Task, 'duration_secs': 0.010371} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.964072] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.964324] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.964558] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.964702] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.964886] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 689.965324] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa9fed72-0181-4ec2-a645-76397b7ccba2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.973197] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.973381] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.976163] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3734888-1546-4dce-a6a8-af5447e1cead {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.983088] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 689.983088] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524f4e19-d729-ad02-c029-98e633caa673" [ 689.983088] env[61898]: _type = "Task" [ 689.983088] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.988669] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524f4e19-d729-ad02-c029-98e633caa673, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.043381] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17c2c77-93f2-4af6-bfaa-81568c14a865 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.050806] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632cdc58-04ca-4b45-ab3b-fef5d542fe66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.081673] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Successfully created port: 003c6988-67d6-4d40-8682-5d823dcc867c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.083936] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a676411a-0018-4925-8f95-9ff0ec416852 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.091344] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3e21eb-0979-4764-94f1-1be214804133 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.104197] env[61898]: DEBUG nova.compute.provider_tree [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.225603] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 690.493416] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524f4e19-d729-ad02-c029-98e633caa673, 'name': SearchDatastore_Task, 'duration_secs': 0.008028} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.494475] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4cb84b-dfb9-4db1-a0aa-95fd39476503 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.499524] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 690.499524] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52499782-9998-c1b8-d14e-27778ea452cf" [ 690.499524] env[61898]: _type = "Task" [ 690.499524] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.507311] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52499782-9998-c1b8-d14e-27778ea452cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.610513] env[61898]: DEBUG nova.scheduler.client.report [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 690.735877] env[61898]: DEBUG nova.compute.manager [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Received event network-changed-003c6988-67d6-4d40-8682-5d823dcc867c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 690.736141] env[61898]: DEBUG nova.compute.manager [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Refreshing instance network info cache due to event network-changed-003c6988-67d6-4d40-8682-5d823dcc867c. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 690.736400] env[61898]: DEBUG oslo_concurrency.lockutils [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] Acquiring lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.736590] env[61898]: DEBUG oslo_concurrency.lockutils [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] Acquired lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.736821] env[61898]: DEBUG nova.network.neutron [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Refreshing network info cache for port 003c6988-67d6-4d40-8682-5d823dcc867c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.914624] env[61898]: ERROR nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 690.914624] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.914624] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.914624] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.914624] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.914624] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.914624] env[61898]: ERROR nova.compute.manager raise self.value [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.914624] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 690.914624] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.914624] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 690.915177] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.915177] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 690.915177] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 690.915177] env[61898]: ERROR nova.compute.manager [ 690.915177] env[61898]: Traceback (most recent call last): [ 690.915177] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 690.915177] env[61898]: listener.cb(fileno) [ 690.915177] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.915177] env[61898]: result = function(*args, **kwargs) [ 690.915177] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 690.915177] env[61898]: return func(*args, **kwargs) [ 690.915177] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 690.915177] env[61898]: raise e [ 690.915177] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 690.915177] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 690.915177] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 690.915177] env[61898]: created_port_ids = self._update_ports_for_instance( [ 690.915177] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 690.915177] env[61898]: with excutils.save_and_reraise_exception(): [ 690.915177] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.915177] env[61898]: self.force_reraise() [ 690.915177] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.915177] env[61898]: raise self.value [ 690.915177] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 690.915177] env[61898]: updated_port = self._update_port( [ 690.915177] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.915177] env[61898]: _ensure_no_port_binding_failure(port) [ 690.915177] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.915177] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 690.916025] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 690.916025] env[61898]: Removing descriptor: 20 [ 691.011690] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52499782-9998-c1b8-d14e-27778ea452cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009175} completed successfully. 
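The traceback above bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed. In essence that check is a guard on the port's binding:vif_type: Neutron marks an unbindable port as "binding_failed", and treating that as a hard error is what surfaces the exception seen throughout this log. A simplified sketch (the exception class and port dict shape are illustrative, not Nova's exact code):

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__('Binding failed for port %s, please check neutron '
                         'logs for more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # A port whose binding:vif_type is "binding_failed" was never wired up by
    # any Neutron mechanism driver, so the instance cannot be spawned with it.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])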
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.011949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.012215] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 81fd9ccc-a267-498d-93d4-8adf894ee8d8/81fd9ccc-a267-498d-93d4-8adf894ee8d8.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.012474] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-377b0c08-03f5-489f-9b32-b0ea6c7a9e3a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.018476] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 691.018476] env[61898]: value = "task-1240442" [ 691.018476] env[61898]: _type = "Task" [ 691.018476] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.026257] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.115067] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.115127] env[61898]: ERROR nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. 
[ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Traceback (most recent call last): [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.driver.spawn(context, instance, image_meta, [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] vm_ref = self.build_virtual_machine(instance, [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.115127] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] for vif in network_info: [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self._sync_wrapper(fn, *args, **kwargs) [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.wait() [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self[:] = self._gt.wait() [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self._exit_event.wait() [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] result = hub.switch() [ 691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
691.115497] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return self.greenlet.switch() [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] result = function(*args, **kwargs) [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] return func(*args, **kwargs) [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise e [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] nwinfo = self.network_api.allocate_for_instance( [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] created_port_ids = self._update_ports_for_instance( [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] with excutils.save_and_reraise_exception(): [ 691.116283] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] self.force_reraise() [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise self.value [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] updated_port = self._update_port( [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] _ensure_no_port_binding_failure(port) [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] raise exception.PortBindingFailed(port_id=port['id']) [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] nova.exception.PortBindingFailed: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. [ 691.117142] env[61898]: ERROR nova.compute.manager [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] [ 691.117574] env[61898]: DEBUG nova.compute.utils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 691.117574] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.331s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.118684] env[61898]: INFO nova.compute.claims [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.121571] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Build of instance 74a2e109-244c-4349-a0b7-0db9e9d4868e was re-scheduled: Binding failed for port bcddd00e-804c-44a4-96d3-61b1e2ef3fbb, please check neutron logs for more information. 
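The "with excutils.save_and_reraise_exception():" frames that appear in both tracebacks are oslo.utils' cleanup-then-reraise helper: it captures the in-flight exception, lets the caller run cleanup, and re-raises the original afterwards. A small usage sketch (the port-update and rollback callables are illustrative):

from oslo_utils import excutils

def update_ports(ports, update_one, rollback):
    for port in ports:
        try:
            update_one(port)
        except Exception:
            # The original exception is re-raised when the block exits,
            # unless the context's reraise flag is cleared inside it.
            with excutils.save_and_reraise_exception():
                rollback(port)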
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 691.121996] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 691.122242] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquiring lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.122396] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Acquired lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.122536] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.239835] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 691.260084] env[61898]: DEBUG nova.network.neutron [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.271215] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 691.271542] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 691.271782] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.272088] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 691.272248] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.272439] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 691.272685] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 691.272855] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 691.273063] env[61898]: DEBUG 
nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 691.273342] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 691.273521] env[61898]: DEBUG nova.virt.hardware [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 691.274686] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb875aa7-183e-441d-b82a-d8aa4b5bb341 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.284312] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b307fe4e-2062-49c7-939d-7cfd60d31563 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.302565] env[61898]: ERROR nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. 
[ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Traceback (most recent call last): [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] yield resources [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.driver.spawn(context, instance, image_meta, [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] vm_ref = self.build_virtual_machine(instance, [ 691.302565] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] for vif in network_info: [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return self._sync_wrapper(fn, *args, **kwargs) [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.wait() [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self[:] = self._gt.wait() [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return self._exit_event.wait() [ 691.303111] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 691.303111] env[61898]: ERROR 
nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] current.throw(*self._exc) [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] result = function(*args, **kwargs) [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return func(*args, **kwargs) [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise e [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] nwinfo = self.network_api.allocate_for_instance( [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] created_port_ids = self._update_ports_for_instance( [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] with excutils.save_and_reraise_exception(): [ 691.303554] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.force_reraise() [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise self.value [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] updated_port = self._update_port( [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] _ensure_no_port_binding_failure(port) [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise exception.PortBindingFailed(port_id=port['id']) [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 691.303936] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] [ 691.303936] env[61898]: INFO nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Terminating instance [ 691.361797] env[61898]: DEBUG nova.network.neutron [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.528320] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.428467} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.528636] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 81fd9ccc-a267-498d-93d4-8adf894ee8d8/81fd9ccc-a267-498d-93d4-8adf894ee8d8.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.528833] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.529110] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-422f328e-cde0-4a6e-9d0f-fcdf5f1f54a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.535495] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 691.535495] env[61898]: value = "task-1240443" [ 691.535495] env[61898]: _type = "Task" [ 691.535495] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.542828] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240443, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.642207] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 691.713929] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.811207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.865525] env[61898]: DEBUG oslo_concurrency.lockutils [req-8552394c-44fe-4b2a-bff7-5b0419ef144f req-f6053936-ceb9-4b16-b8d6-24f14ec95330 service nova] Releasing lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.865525] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.865525] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.045303] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240443, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065549} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.045559] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.046374] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ef82c2-fa56-45ea-ac1d-63ee24232033 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.067382] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 81fd9ccc-a267-498d-93d4-8adf894ee8d8/81fd9ccc-a267-498d-93d4-8adf894ee8d8.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.067657] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21db67d2-736d-473f-a05c-43c8b108f176 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.087392] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 692.087392] env[61898]: value = "task-1240444" [ 692.087392] env[61898]: _type = "Task" [ 692.087392] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.095317] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.219799] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Releasing lock "refresh_cache-74a2e109-244c-4349-a0b7-0db9e9d4868e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.220081] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
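The Copy/Extend/Reconfigure entries above all operate on datastore paths of the form "[datastore2] folder/name.vmdk". A tiny sketch of how such a string can be composed and split (purely illustrative; the real code uses a dedicated datastore-path helper rather than ad-hoc string handling):

def make_ds_path(datastore, *parts):
    # Builds the "[datastore2] 81fd9ccc-.../81fd9ccc-....vmdk" form seen above.
    return '[%s] %s' % (datastore, '/'.join(parts))

def split_ds_path(ds_path):
    # Inverse operation: returns (datastore_name, relative_path).
    datastore, _, rel = ds_path.partition('] ')
    return datastore.lstrip('['), rel

assert split_ds_path(make_ds_path('datastore2', 'dir', 'disk.vmdk')) == \
    ('datastore2', 'dir/disk.vmdk')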
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 692.220274] env[61898]: DEBUG nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 692.220442] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.236050] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.383340] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.450205] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.452615] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0949e89c-e9e4-45d2-b95e-aad6f9d00b6f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.460762] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d814d17f-e874-46af-bc4c-85034d86bb93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.490972] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2e79d6-0b04-4da6-ac4a-f96be3b15893 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.497916] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ca3ecb-b475-486e-8f60-943d0b68470f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.512150] env[61898]: DEBUG nova.compute.provider_tree [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.596867] env[61898]: DEBUG oslo_vmware.api [None 
req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240444, 'name': ReconfigVM_Task, 'duration_secs': 0.28282} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.597113] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 81fd9ccc-a267-498d-93d4-8adf894ee8d8/81fd9ccc-a267-498d-93d4-8adf894ee8d8.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.597867] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71fdc476-e5aa-40ff-a1c8-72f6df63164b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.604110] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 692.604110] env[61898]: value = "task-1240445" [ 692.604110] env[61898]: _type = "Task" [ 692.604110] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.612730] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240445, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.739428] env[61898]: DEBUG nova.network.neutron [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.761078] env[61898]: DEBUG nova.compute.manager [req-80ee4c37-e169-4a19-b463-c44d20827778 req-d8911491-4962-4e72-ad52-4799b4a8d178 service nova] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Received event network-vif-deleted-003c6988-67d6-4d40-8682-5d823dcc867c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 692.955788] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.956253] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 692.956454] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.956760] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88e34f9e-52db-40be-b3d1-9d47ce6cfc0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.965845] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e00a58-b47a-493b-a9fd-cf5e04de58fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.987415] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f70b6e1-b534-40a1-b262-e0a5ce3e425e could not be found. [ 692.987635] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 692.987819] env[61898]: INFO nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 692.988064] env[61898]: DEBUG oslo.service.loopingcall [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.988298] env[61898]: DEBUG nova.compute.manager [-] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 692.988391] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.002960] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance cache missing network info. 
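The WARNING followed by "Instance destroyed" above shows the destroy path tolerating a VM that never materialized on the backend: InstanceNotFound is downgraded to a warning and teardown continues with network deallocation. A condensed sketch of that pattern (the exception class and callables are illustrative, not the driver's code):

import logging

class InstanceNotFound(Exception):
    pass

def destroy(instance_uuid, find_vm, destroy_vm, log=logging.getLogger(__name__)):
    try:
        vm_ref = find_vm(instance_uuid)
        destroy_vm(vm_ref)
    except InstanceNotFound:
        # A spawn that failed before the VM was created leaves nothing on the
        # hypervisor; that is not an error for the destroy path.
        log.warning('Instance does not exist on backend: %s', instance_uuid)
    # Network deallocation and claim cleanup proceed either way.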
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.014275] env[61898]: DEBUG nova.scheduler.client.report [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 693.114026] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240445, 'name': Rename_Task, 'duration_secs': 0.137055} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.114238] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.114493] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c2ff17e-2fae-4edd-982d-d9afe0332a77 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.120343] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 693.120343] env[61898]: value = "task-1240446" [ 693.120343] env[61898]: _type = "Task" [ 693.120343] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.127945] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.242600] env[61898]: INFO nova.compute.manager [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] [instance: 74a2e109-244c-4349-a0b7-0db9e9d4868e] Took 1.02 seconds to deallocate network for instance. 
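The inventory dict reported above is what bounds schedulable capacity per resource class. Assuming the usual placement arithmetic, capacity = (total - reserved) * allocation_ratio, the logged values work out as in this small worked sketch (helper name is illustrative):

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def capacity(inv):
    # (total - reserved) * allocation_ratio, per resource class.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(capacity(inventory))  # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}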
[ 693.505651] env[61898]: DEBUG nova.network.neutron [-] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.518749] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.519449] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 693.522372] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.527s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.630027] env[61898]: DEBUG oslo_vmware.api [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240446, 'name': PowerOnVM_Task, 'duration_secs': 0.427424} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.630027] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 693.630027] env[61898]: INFO nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Took 4.79 seconds to spawn the instance on the hypervisor. [ 693.630246] env[61898]: DEBUG nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 693.630926] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ad4142-e820-4b02-aec5-e9b0d5502195 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.009050] env[61898]: INFO nova.compute.manager [-] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Took 1.02 seconds to deallocate network for instance. 
[ 694.013089] env[61898]: DEBUG nova.compute.claims [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 694.013344] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 694.026842] env[61898]: DEBUG nova.compute.utils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 694.031262] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 694.031431] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 694.071224] env[61898]: DEBUG nova.policy [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07b7c3596b504569990fb80eb33e6595', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72688518a0bd478e97a72bf725a573ce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 694.148155] env[61898]: INFO nova.compute.manager [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Took 41.99 seconds to build instance. 
[ 694.279761] env[61898]: INFO nova.scheduler.client.report [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Deleted allocations for instance 74a2e109-244c-4349-a0b7-0db9e9d4868e [ 694.421330] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e969cd6-84d9-4de8-9a92-3977038642de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.425665] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Successfully created port: f2ea5526-d249-482e-9ddf-7aa51259834a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.432888] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ce269e-4759-4380-8e14-56ed8c860552 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.463401] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c580e7-dba3-499c-b6ba-0089409b629f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.471155] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9eeb675-28e5-4b17-8b22-d246a9d43526 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.485393] env[61898]: DEBUG nova.compute.provider_tree [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.532202] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 694.652585] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2135f60a-e8b1-4316-b3d3-15a11a8b2db8 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 130.579s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.795794] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ee1ef589-6e63-4d71-9634-426a1d262f68 tempest-ServerRescueNegativeTestJSON-577494169 tempest-ServerRescueNegativeTestJSON-577494169-project-member] Lock "74a2e109-244c-4349-a0b7-0db9e9d4868e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 133.861s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 694.988191] env[61898]: DEBUG nova.scheduler.client.report [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 695.155866] env[61898]: DEBUG nova.compute.manager [None req-1f8fd327-3a15-4c5d-94c8-fdbcb9a49d2a tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 695.159061] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 695.159345] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450cdec6-1eaf-4350-96b2-be9073e5cf52 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.294276] env[61898]: DEBUG nova.compute.manager [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Received event network-changed-f2ea5526-d249-482e-9ddf-7aa51259834a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 695.294276] env[61898]: DEBUG nova.compute.manager [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Refreshing instance network info cache due to event network-changed-f2ea5526-d249-482e-9ddf-7aa51259834a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 695.294276] env[61898]: DEBUG oslo_concurrency.lockutils [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] Acquiring lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.294276] env[61898]: DEBUG oslo_concurrency.lockutils [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] Acquired lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.294276] env[61898]: DEBUG nova.network.neutron [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Refreshing network info cache for port f2ea5526-d249-482e-9ddf-7aa51259834a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.298324] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 695.302100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.302514] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.302867] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.303202] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.303493] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.306743] env[61898]: INFO nova.compute.manager [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Terminating instance [ 695.317541] env[61898]: ERROR nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. [ 695.317541] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.317541] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.317541] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.317541] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.317541] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.317541] env[61898]: ERROR nova.compute.manager raise self.value [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.317541] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 695.317541] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.317541] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 695.318062] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.318062] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 695.318062] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. 
[ 695.318062] env[61898]: ERROR nova.compute.manager [ 695.318062] env[61898]: Traceback (most recent call last): [ 695.318062] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 695.318062] env[61898]: listener.cb(fileno) [ 695.318062] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.318062] env[61898]: result = function(*args, **kwargs) [ 695.318062] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.318062] env[61898]: return func(*args, **kwargs) [ 695.318062] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.318062] env[61898]: raise e [ 695.318062] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.318062] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 695.318062] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.318062] env[61898]: created_port_ids = self._update_ports_for_instance( [ 695.318062] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.318062] env[61898]: with excutils.save_and_reraise_exception(): [ 695.318062] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.318062] env[61898]: self.force_reraise() [ 695.318062] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.318062] env[61898]: raise self.value [ 695.318062] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.318062] env[61898]: updated_port = self._update_port( [ 695.318062] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.318062] env[61898]: _ensure_no_port_binding_failure(port) [ 695.318062] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.318062] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 695.318911] env[61898]: nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. [ 695.318911] env[61898]: Removing descriptor: 20 [ 695.493014] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.970s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.493756] env[61898]: ERROR nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. 
[ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Traceback (most recent call last): [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.driver.spawn(context, instance, image_meta, [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] vm_ref = self.build_virtual_machine(instance, [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.493756] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] for vif in network_info: [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return self._sync_wrapper(fn, *args, **kwargs) [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.wait() [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self[:] = self._gt.wait() [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return self._exit_event.wait() [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] current.throw(*self._exc) [ 695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
695.494134] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] result = function(*args, **kwargs) [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] return func(*args, **kwargs) [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise e [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] nwinfo = self.network_api.allocate_for_instance( [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] created_port_ids = self._update_ports_for_instance( [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] with excutils.save_and_reraise_exception(): [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] self.force_reraise() [ 695.494466] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise self.value [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] updated_port = self._update_port( [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] _ensure_no_port_binding_failure(port) [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] raise exception.PortBindingFailed(port_id=port['id']) [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] nova.exception.PortBindingFailed: Binding failed for 
port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. [ 695.494786] env[61898]: ERROR nova.compute.manager [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] [ 695.494786] env[61898]: DEBUG nova.compute.utils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 695.496091] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.333s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.499411] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Build of instance 2cfdb95a-8c00-4528-a4bc-55f4ced67a89 was re-scheduled: Binding failed for port 3f69a569-f8a4-4ab1-81f5-2e4dd9623732, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 695.499882] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 695.500147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquiring lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.500316] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Acquired lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.500598] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.543771] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 695.568351] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:55:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1019801332',id=36,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-186954947',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=<?>,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-10T11:52:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 695.568726] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 695.568884] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 695.569079] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 695.569224] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 695.569369] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 695.569585] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 695.570363] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 
tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 695.570363] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 695.570526] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 695.570653] env[61898]: DEBUG nova.virt.hardware [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 695.572260] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82818fc6-1d89-4b94-a201-582bff87ee41 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.580127] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7d6a00-08f2-4beb-8668-1715dbaaf800 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.595341] env[61898]: ERROR nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. 
[ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Traceback (most recent call last): [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] yield resources [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.driver.spawn(context, instance, image_meta, [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] vm_ref = self.build_virtual_machine(instance, [ 695.595341] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] for vif in network_info: [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return self._sync_wrapper(fn, *args, **kwargs) [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.wait() [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self[:] = self._gt.wait() [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return self._exit_event.wait() [ 695.595656] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 695.595656] env[61898]: ERROR 
nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] current.throw(*self._exc) [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] result = function(*args, **kwargs) [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return func(*args, **kwargs) [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise e [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] nwinfo = self.network_api.allocate_for_instance( [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] created_port_ids = self._update_ports_for_instance( [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] with excutils.save_and_reraise_exception(): [ 695.595981] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.force_reraise() [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise self.value [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] updated_port = self._update_port( [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] _ensure_no_port_binding_failure(port) [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise exception.PortBindingFailed(port_id=port['id']) [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. [ 695.596338] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] [ 695.596338] env[61898]: INFO nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Terminating instance [ 695.669714] env[61898]: INFO nova.compute.manager [None req-1f8fd327-3a15-4c5d-94c8-fdbcb9a49d2a tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] instance snapshotting [ 695.669828] env[61898]: DEBUG nova.objects.instance [None req-1f8fd327-3a15-4c5d-94c8-fdbcb9a49d2a tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lazy-loading 'flavor' on Instance uuid 81fd9ccc-a267-498d-93d4-8adf894ee8d8 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 695.685074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.812440] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "refresh_cache-81fd9ccc-a267-498d-93d4-8adf894ee8d8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 695.812675] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquired lock "refresh_cache-81fd9ccc-a267-498d-93d4-8adf894ee8d8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.812853] env[61898]: DEBUG nova.network.neutron [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.816249] env[61898]: DEBUG nova.network.neutron [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 695.820985] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 695.894133] env[61898]: DEBUG nova.network.neutron [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.029671] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.100019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquiring lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.114919] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.178437] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c13359e-1e60-4d69-a5d6-00abddc59e62 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.201232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741e7fd6-cb2c-49ce-bbb2-4dca0b9709f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.334215] env[61898]: DEBUG nova.network.neutron [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.397719] env[61898]: DEBUG oslo_concurrency.lockutils [req-298f60f4-5b77-4924-bc55-faf78708b91b req-2ba03248-0a13-4804-9084-2b4af599c53b service nova] Releasing lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.398688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquired lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.398886] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 696.406916] env[61898]: DEBUG nova.network.neutron [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.431909] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100bdc96-10b7-4a59-b027-12687fdf7817 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.441237] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad357a24-b385-47fe-b4ed-dbdfebbad6f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.475989] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ce66f-df93-4eeb-9376-08cf0e7cb340 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.484300] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d7ae27-de8a-4921-865b-045ac6e41128 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.498357] env[61898]: DEBUG nova.compute.provider_tree [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.617969] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Releasing lock "refresh_cache-2cfdb95a-8c00-4528-a4bc-55f4ced67a89" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.618464] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 
tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 696.618592] env[61898]: DEBUG nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 696.618836] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.636762] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.714225] env[61898]: DEBUG nova.compute.manager [None req-1f8fd327-3a15-4c5d-94c8-fdbcb9a49d2a tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance disappeared during snapshot {{(pid=61898) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4580}} [ 696.851137] env[61898]: DEBUG nova.compute.manager [None req-1f8fd327-3a15-4c5d-94c8-fdbcb9a49d2a tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Found 0 images (rotation: 2) {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 696.909770] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Releasing lock "refresh_cache-81fd9ccc-a267-498d-93d4-8adf894ee8d8" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 696.909770] env[61898]: DEBUG nova.compute.manager [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 696.909770] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.910679] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee93cb2-a075-45f0-a3c7-a0a6dedf41ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.919610] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 696.919610] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.922033] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-812f5abf-f2cd-40e3-ab8b-6cf46fb61e97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.928686] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 696.928686] env[61898]: value = "task-1240447" [ 696.928686] env[61898]: _type = "Task" [ 696.928686] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.941817] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240447, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.001193] env[61898]: DEBUG nova.scheduler.client.report [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 697.050980] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.140792] env[61898]: DEBUG nova.network.neutron [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.343325] env[61898]: DEBUG nova.compute.manager [req-967fa711-1d2b-430a-a8ed-b8a84b502e0e req-3e1f748f-0832-421a-921c-c11280a88292 service nova] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Received event network-vif-deleted-f2ea5526-d249-482e-9ddf-7aa51259834a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 697.440123] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240447, 'name': PowerOffVM_Task, 'duration_secs': 0.138545} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.440710] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.440710] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.440838] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d6cd1ea-5cbd-4cc4-9f21-8f3fbd0f2543 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.467145] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 697.467271] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 697.467447] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Deleting the datastore file [datastore2] 81fd9ccc-a267-498d-93d4-8adf894ee8d8 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.467703] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f1d8046-9b6f-462b-a1f5-1bfff629ccf3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.474284] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for the task: (returnval){ [ 697.474284] env[61898]: value = "task-1240449" [ 697.474284] env[61898]: _type = "Task" [ 697.474284] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.482187] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240449, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.506259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.010s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.506910] env[61898]: ERROR nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Traceback (most recent call last): [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.driver.spawn(context, instance, image_meta, [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] vm_ref = self.build_virtual_machine(instance, [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.506910] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] for vif in network_info: [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return self._sync_wrapper(fn, *args, **kwargs) [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.wait() [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.507457] env[61898]: 
ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self[:] = self._gt.wait() [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return self._exit_event.wait() [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] current.throw(*self._exc) [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.507457] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] result = function(*args, **kwargs) [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] return func(*args, **kwargs) [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise e [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] nwinfo = self.network_api.allocate_for_instance( [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] created_port_ids = self._update_ports_for_instance( [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] with excutils.save_and_reraise_exception(): [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] self.force_reraise() [ 697.507782] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise self.value [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in 
_update_ports_for_instance [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] updated_port = self._update_port( [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] _ensure_no_port_binding_failure(port) [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] raise exception.PortBindingFailed(port_id=port['id']) [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] nova.exception.PortBindingFailed: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. [ 697.508144] env[61898]: ERROR nova.compute.manager [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] [ 697.508144] env[61898]: DEBUG nova.compute.utils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.509022] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 29.105s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.512196] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Build of instance 6fc82922-9142-475b-99a6-bbc5ee43b30b was re-scheduled: Binding failed for port 515d2f3e-160a-4cef-a9c3-7c1902058771, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 697.512648] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 697.512911] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquiring lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.513087] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Acquired lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.513267] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.553032] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Releasing lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.553654] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 697.553720] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 697.554258] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5530eae-26a1-4088-8cde-7b26e5dd7eb3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.564266] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4833fa81-47cb-4cd5-a5fc-b7a36e12bc4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.586988] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a could not be found. [ 697.587240] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.587454] env[61898]: INFO nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 697.588037] env[61898]: DEBUG oslo.service.loopingcall [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.588037] env[61898]: DEBUG nova.compute.manager [-] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 697.588037] env[61898]: DEBUG nova.network.neutron [-] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.606207] env[61898]: DEBUG nova.network.neutron [-] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.644035] env[61898]: INFO nova.compute.manager [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] [instance: 2cfdb95a-8c00-4528-a4bc-55f4ced67a89] Took 1.03 seconds to deallocate network for instance. 
[ 697.985469] env[61898]: DEBUG oslo_vmware.api [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Task: {'id': task-1240449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086731} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.985780] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.986015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.986254] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.986469] env[61898]: INFO nova.compute.manager [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Took 1.08 seconds to destroy the instance on the hypervisor. [ 697.986749] env[61898]: DEBUG oslo.service.loopingcall [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.986977] env[61898]: DEBUG nova.compute.manager [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 697.987095] env[61898]: DEBUG nova.network.neutron [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.001986] env[61898]: DEBUG nova.network.neutron [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.033738] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.109031] env[61898]: DEBUG nova.network.neutron [-] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.243343] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.340635] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a71f31-295e-4a05-98c6-2636ba73b72f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.348416] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8986fbe8-4cf2-412e-b860-7d5ddc740cc3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.380305] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61447f9-210f-4b43-a258-946b3a1965ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.388242] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0449281-3b8d-43a5-a64c-dee42b1d6b32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.402356] env[61898]: DEBUG nova.compute.provider_tree [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.504468] env[61898]: DEBUG nova.network.neutron [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.610891] env[61898]: INFO nova.compute.manager [-] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Took 1.02 seconds to deallocate network for instance. 
[ 698.613339] env[61898]: DEBUG nova.compute.claims [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 698.613525] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.670989] env[61898]: INFO nova.scheduler.client.report [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Deleted allocations for instance 2cfdb95a-8c00-4528-a4bc-55f4ced67a89 [ 698.746417] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Releasing lock "refresh_cache-6fc82922-9142-475b-99a6-bbc5ee43b30b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.746514] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 698.746699] env[61898]: DEBUG nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 698.746872] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 698.763463] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.905746] env[61898]: DEBUG nova.scheduler.client.report [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 699.007237] env[61898]: INFO nova.compute.manager [-] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Took 1.02 seconds to deallocate network for instance. [ 699.179313] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5044cff2-8eed-4c85-aa02-b3943c744792 tempest-SecurityGroupsTestJSON-727174544 tempest-SecurityGroupsTestJSON-727174544-project-member] Lock "2cfdb95a-8c00-4528-a4bc-55f4ced67a89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.609s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.265631] env[61898]: DEBUG nova.network.neutron [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.410831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.902s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.411527] env[61898]: ERROR nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. 
[ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Traceback (most recent call last): [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.driver.spawn(context, instance, image_meta, [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] vm_ref = self.build_virtual_machine(instance, [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.411527] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] for vif in network_info: [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return self._sync_wrapper(fn, *args, **kwargs) [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.wait() [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self[:] = self._gt.wait() [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return self._exit_event.wait() [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] current.throw(*self._exc) [ 699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
699.411903] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] result = function(*args, **kwargs) [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] return func(*args, **kwargs) [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise e [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] nwinfo = self.network_api.allocate_for_instance( [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] created_port_ids = self._update_ports_for_instance( [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] with excutils.save_and_reraise_exception(): [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] self.force_reraise() [ 699.412321] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise self.value [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] updated_port = self._update_port( [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] _ensure_no_port_binding_failure(port) [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] raise exception.PortBindingFailed(port_id=port['id']) [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] nova.exception.PortBindingFailed: Binding failed for 
port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. [ 699.412701] env[61898]: ERROR nova.compute.manager [instance: 31239011-3cd9-4fea-a99d-26d09884497b] [ 699.412701] env[61898]: DEBUG nova.compute.utils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 699.413542] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.073s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.415014] env[61898]: INFO nova.compute.claims [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.417628] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Build of instance 31239011-3cd9-4fea-a99d-26d09884497b was re-scheduled: Binding failed for port f19c6e84-b5a3-4f3f-b5de-27147c797d39, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 699.418420] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 699.418420] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquiring lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.418544] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Acquired lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.418833] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.513749] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.681760] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 699.768061] env[61898]: INFO nova.compute.manager [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] [instance: 6fc82922-9142-475b-99a6-bbc5ee43b30b] Took 1.02 seconds to deallocate network for instance. [ 699.938523] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.020708] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.201330] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.523372] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Releasing lock "refresh_cache-31239011-3cd9-4fea-a99d-26d09884497b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.523605] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 700.523784] env[61898]: DEBUG nova.compute.manager [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 700.523947] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.541739] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.732997] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a090e2ca-8382-4c15-b1fa-8fe10e64b37c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.741872] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e16958-7c62-42c0-98a9-5d3b53429a1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.774026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96a4809-65c5-4011-af27-1a48232768d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.784394] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60241cc0-5177-4952-9f7f-26dbcf239481 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.799320] env[61898]: DEBUG nova.compute.provider_tree [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.801176] env[61898]: INFO nova.scheduler.client.report [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Deleted allocations for instance 6fc82922-9142-475b-99a6-bbc5ee43b30b [ 701.044223] env[61898]: DEBUG nova.network.neutron [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.308210] env[61898]: DEBUG nova.scheduler.client.report [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 701.311548] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ea5445fd-d46e-4fdc-b3a2-765d0d2411d3 tempest-ServerPasswordTestJSON-2106462762 tempest-ServerPasswordTestJSON-2106462762-project-member] Lock "6fc82922-9142-475b-99a6-bbc5ee43b30b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.781s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.549892] env[61898]: INFO nova.compute.manager [None 
req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] [instance: 31239011-3cd9-4fea-a99d-26d09884497b] Took 1.02 seconds to deallocate network for instance. [ 701.815016] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 701.818683] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.819297] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 701.822108] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.121s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.823179] env[61898]: INFO nova.compute.claims [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.326695] env[61898]: DEBUG nova.compute.utils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.335276] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 702.335276] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.363258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.416749] env[61898]: DEBUG nova.policy [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2dd151f92a04f23be20c68d63d33552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa869ca8813f4a52b1e9d828d3c908b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.583942] env[61898]: INFO nova.scheduler.client.report [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Deleted allocations for instance 31239011-3cd9-4fea-a99d-26d09884497b [ 702.834762] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 702.993440] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Successfully created port: fd6cab6f-8a77-4c20-8cea-570aca74aacd {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.096309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a71c3add-4ec2-4cc0-8181-36d5d4c2ba41 tempest-ServersV294TestFqdnHostnames-1824104406 tempest-ServersV294TestFqdnHostnames-1824104406-project-member] Lock "31239011-3cd9-4fea-a99d-26d09884497b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.327s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.252911] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a9b913-462e-466a-a2f3-16bbebafc865 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.263208] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99070aa-b9c0-4382-a663-d16d2d375aaa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.300303] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af8f362-e550-4435-94fe-7c2bd3033e98 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.309018] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1846478b-295c-4604-a745-17b0dda216fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.324290] env[61898]: DEBUG nova.compute.provider_tree [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.601137] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 703.830741] env[61898]: DEBUG nova.scheduler.client.report [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 703.849947] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 703.885108] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 703.885411] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 703.885603] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.885823] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 703.886016] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.886185] 
env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 703.886391] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 703.886549] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 703.887455] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 703.887637] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 703.887815] env[61898]: DEBUG nova.virt.hardware [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 703.888704] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6021d73c-bc43-47ed-8ced-8e7e9436434b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.896671] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11c52f9-0efb-47b7-8f45-d053446aa657 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.130358] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.334203] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
704.335082] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 704.340383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.158s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.412535] env[61898]: DEBUG nova.compute.manager [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Received event network-changed-fd6cab6f-8a77-4c20-8cea-570aca74aacd {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 704.412748] env[61898]: DEBUG nova.compute.manager [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Refreshing instance network info cache due to event network-changed-fd6cab6f-8a77-4c20-8cea-570aca74aacd. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 704.412959] env[61898]: DEBUG oslo_concurrency.lockutils [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] Acquiring lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.413188] env[61898]: DEBUG oslo_concurrency.lockutils [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] Acquired lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.413411] env[61898]: DEBUG nova.network.neutron [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Refreshing network info cache for port fd6cab6f-8a77-4c20-8cea-570aca74aacd {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.541274] env[61898]: ERROR nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. 
[ 704.541274] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 704.541274] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.541274] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.541274] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.541274] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.541274] env[61898]: ERROR nova.compute.manager raise self.value [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.541274] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 704.541274] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.541274] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 704.541916] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.541916] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 704.541916] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. 
[ 704.541916] env[61898]: ERROR nova.compute.manager [ 704.541916] env[61898]: Traceback (most recent call last): [ 704.541916] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 704.541916] env[61898]: listener.cb(fileno) [ 704.541916] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.541916] env[61898]: result = function(*args, **kwargs) [ 704.541916] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.541916] env[61898]: return func(*args, **kwargs) [ 704.541916] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 704.541916] env[61898]: raise e [ 704.541916] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 704.541916] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 704.541916] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.541916] env[61898]: created_port_ids = self._update_ports_for_instance( [ 704.541916] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.541916] env[61898]: with excutils.save_and_reraise_exception(): [ 704.541916] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.541916] env[61898]: self.force_reraise() [ 704.541916] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.541916] env[61898]: raise self.value [ 704.541916] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.541916] env[61898]: updated_port = self._update_port( [ 704.541916] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.541916] env[61898]: _ensure_no_port_binding_failure(port) [ 704.541916] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.541916] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 704.542960] env[61898]: nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. [ 704.542960] env[61898]: Removing descriptor: 20 [ 704.542960] env[61898]: ERROR nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. 
[ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Traceback (most recent call last): [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] yield resources [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.driver.spawn(context, instance, image_meta, [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.542960] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] vm_ref = self.build_virtual_machine(instance, [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] for vif in network_info: [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self._sync_wrapper(fn, *args, **kwargs) [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.wait() [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self[:] = self._gt.wait() [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self._exit_event.wait() [ 704.543391] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.543805] env[61898]: ERROR 
nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] result = hub.switch() [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self.greenlet.switch() [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] result = function(*args, **kwargs) [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return func(*args, **kwargs) [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise e [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] nwinfo = self.network_api.allocate_for_instance( [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 704.543805] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] created_port_ids = self._update_ports_for_instance( [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] with excutils.save_and_reraise_exception(): [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.force_reraise() [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise self.value [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] updated_port = self._update_port( [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.544223] 
env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] _ensure_no_port_binding_failure(port) [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.544223] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise exception.PortBindingFailed(port_id=port['id']) [ 704.544618] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. [ 704.544618] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] [ 704.544618] env[61898]: INFO nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Terminating instance [ 704.848844] env[61898]: DEBUG nova.compute.utils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.853567] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 704.853817] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 704.925039] env[61898]: DEBUG nova.policy [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2dd151f92a04f23be20c68d63d33552', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa869ca8813f4a52b1e9d828d3c908b5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 704.949685] env[61898]: DEBUG nova.network.neutron [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.052942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.102125] env[61898]: DEBUG nova.network.neutron [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.188110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9749f3-73fb-43a8-954e-333c8dc5c0e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.196269] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Successfully created port: 41d5d6de-432b-43ed-a989-213d3b4114de {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.200028] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024a6a71-ead5-4e19-8a5b-c852f35c5115 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.231874] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7a26e6-2652-4974-8326-71c690440bb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.239775] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8476f148-9ae1-4112-8574-a70c00cd80fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.257739] env[61898]: DEBUG nova.compute.provider_tree [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.355085] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 705.604529] env[61898]: DEBUG oslo_concurrency.lockutils [req-b1318e53-7118-41f1-acce-162ceb1d6677 req-bce34e72-9daa-4224-9410-d6c86e36f608 service nova] Releasing lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.604945] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquired lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.605157] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.763684] env[61898]: DEBUG nova.scheduler.client.report [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 706.137158] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.249601] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.271233] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.271868] env[61898]: ERROR nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Traceback (most recent call last): [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.driver.spawn(context, instance, image_meta, [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] vm_ref = self.build_virtual_machine(instance, [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.271868] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] for vif in network_info: [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return self._sync_wrapper(fn, *args, **kwargs) [ 706.272453] env[61898]: ERROR 
nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.wait() [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self[:] = self._gt.wait() [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return self._exit_event.wait() [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] current.throw(*self._exc) [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.272453] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] result = function(*args, **kwargs) [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] return func(*args, **kwargs) [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise e [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] nwinfo = self.network_api.allocate_for_instance( [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] created_port_ids = self._update_ports_for_instance( [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] with excutils.save_and_reraise_exception(): [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] self.force_reraise() [ 706.274377] env[61898]: ERROR nova.compute.manager [instance: 
8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise self.value [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] updated_port = self._update_port( [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] _ensure_no_port_binding_failure(port) [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] raise exception.PortBindingFailed(port_id=port['id']) [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] nova.exception.PortBindingFailed: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. [ 706.275180] env[61898]: ERROR nova.compute.manager [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] [ 706.275180] env[61898]: DEBUG nova.compute.utils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 706.276568] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.260s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.283105] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Build of instance 8ab18b24-91d4-4718-8f1a-d82f4226ba2a was re-scheduled: Binding failed for port 94a1b793-fff2-4121-98c2-bb91979f77f2, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 706.283105] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 706.283303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquiring lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.283961] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Acquired lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.283961] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 706.369310] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 706.396920] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.398152] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.398329] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.398552] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.398698] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.399169] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.399390] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.399545] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.399874] env[61898]: DEBUG 
nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.400066] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.401947] env[61898]: DEBUG nova.virt.hardware [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.401947] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdef620-a46e-4b5c-ac36-2b187beb984f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.409097] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb7b859-33dc-4e85-abb6-0a0403bc4201 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.487100] env[61898]: DEBUG nova.compute.manager [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Received event network-vif-deleted-fd6cab6f-8a77-4c20-8cea-570aca74aacd {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 706.487213] env[61898]: DEBUG nova.compute.manager [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Received event network-changed-41d5d6de-432b-43ed-a989-213d3b4114de {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 706.487407] env[61898]: DEBUG nova.compute.manager [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Refreshing instance network info cache due to event network-changed-41d5d6de-432b-43ed-a989-213d3b4114de. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 706.487635] env[61898]: DEBUG oslo_concurrency.lockutils [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] Acquiring lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.487847] env[61898]: DEBUG oslo_concurrency.lockutils [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] Acquired lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.487958] env[61898]: DEBUG nova.network.neutron [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Refreshing network info cache for port 41d5d6de-432b-43ed-a989-213d3b4114de {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.565871] env[61898]: ERROR nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 706.565871] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 706.565871] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.565871] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.565871] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.565871] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.565871] env[61898]: ERROR nova.compute.manager raise self.value [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.565871] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 706.565871] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.565871] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 706.566528] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.566528] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 706.566528] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 706.566528] env[61898]: ERROR nova.compute.manager [ 706.566528] env[61898]: Traceback (most recent call last): [ 706.566528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 706.566528] env[61898]: listener.cb(fileno) [ 706.566528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.566528] env[61898]: result = function(*args, **kwargs) [ 706.566528] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 706.566528] env[61898]: return func(*args, **kwargs) [ 706.566528] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 706.566528] env[61898]: raise e [ 706.566528] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 706.566528] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 706.566528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.566528] env[61898]: created_port_ids = self._update_ports_for_instance( [ 706.566528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.566528] env[61898]: with excutils.save_and_reraise_exception(): [ 706.566528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.566528] env[61898]: self.force_reraise() [ 706.566528] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.566528] env[61898]: raise self.value [ 706.566528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.566528] env[61898]: updated_port = self._update_port( [ 706.566528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.566528] env[61898]: _ensure_no_port_binding_failure(port) [ 706.566528] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.566528] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 706.567404] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 706.567404] env[61898]: Removing descriptor: 20 [ 706.567404] env[61898]: ERROR nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. 
[ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] Traceback (most recent call last): [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] yield resources [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.driver.spawn(context, instance, image_meta, [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.567404] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] vm_ref = self.build_virtual_machine(instance, [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] for vif in network_info: [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self._sync_wrapper(fn, *args, **kwargs) [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.wait() [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self[:] = self._gt.wait() [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self._exit_event.wait() [ 706.567844] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 706.568303] env[61898]: ERROR 
nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] result = hub.switch() [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self.greenlet.switch() [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] result = function(*args, **kwargs) [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return func(*args, **kwargs) [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise e [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] nwinfo = self.network_api.allocate_for_instance( [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 706.568303] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] created_port_ids = self._update_ports_for_instance( [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] with excutils.save_and_reraise_exception(): [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.force_reraise() [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise self.value [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] updated_port = self._update_port( [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.568753] 
env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] _ensure_no_port_binding_failure(port) [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.568753] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise exception.PortBindingFailed(port_id=port['id']) [ 706.569872] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 706.569872] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] [ 706.569872] env[61898]: INFO nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Terminating instance [ 706.752486] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Releasing lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.753587] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 706.754053] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.755259] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb3ff96b-9770-42ed-8b63-35beb47eafd1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.763902] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9095c8-f684-49f5-8604-bcd23fd54244 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.790369] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a6adf12-7106-46ce-abb0-fe8c5c212905 could not be found. 
[ 706.790663] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.790924] env[61898]: INFO nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Took 0.04 seconds to destroy the instance on the hypervisor. [ 706.791198] env[61898]: DEBUG oslo.service.loopingcall [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.791726] env[61898]: DEBUG nova.compute.manager [-] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 706.791854] env[61898]: DEBUG nova.network.neutron [-] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 706.809276] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.812046] env[61898]: DEBUG nova.network.neutron [-] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.906856] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.010141] env[61898]: DEBUG nova.network.neutron [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.058901] env[61898]: DEBUG nova.network.neutron [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.074885] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.156757] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1e27ca-d4a8-4f18-b0ad-0d81131d939b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.165030] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f807188-b2c7-4f25-bea6-8f3429c08677 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.199241] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01290cd-06e3-474a-8a2b-bc3b785bd37a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.212025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605a2f74-fa0d-4f9a-9f7c-90469be45c7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.222250] env[61898]: DEBUG nova.compute.provider_tree [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.317943] env[61898]: DEBUG nova.network.neutron [-] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.410119] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Releasing lock "refresh_cache-8ab18b24-91d4-4718-8f1a-d82f4226ba2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.410119] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 707.410119] env[61898]: DEBUG nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 707.410119] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 707.426147] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 707.561117] env[61898]: DEBUG oslo_concurrency.lockutils [req-85ff09ef-7a53-44e2-ad78-4bf026cf8199 req-abdb3c38-6138-436a-89fb-a1be0f76585a service nova] Releasing lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.561606] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquired lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.561744] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.726148] env[61898]: DEBUG nova.scheduler.client.report [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 707.820023] env[61898]: INFO nova.compute.manager [-] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Took 1.03 seconds to deallocate network for instance. 
[ 707.822418] env[61898]: DEBUG nova.compute.claims [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 707.822589] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.929415] env[61898]: DEBUG nova.network.neutron [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.977711] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.977937] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.079057] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.122689] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.231354] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.957s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.232019] env[61898]: ERROR nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Traceback (most recent call last): [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.driver.spawn(context, instance, image_meta, [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] vm_ref = self.build_virtual_machine(instance, [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] vif_infos = vmwarevif.get_vif_info(self._session, [ 708.232019] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] for vif in network_info: [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return self._sync_wrapper(fn, *args, **kwargs) [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 
1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.wait() [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self[:] = self._gt.wait() [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return self._exit_event.wait() [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] current.throw(*self._exc) [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 708.232338] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] result = function(*args, **kwargs) [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] return func(*args, **kwargs) [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise e [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] nwinfo = self.network_api.allocate_for_instance( [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] created_port_ids = self._update_ports_for_instance( [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] with excutils.save_and_reraise_exception(): [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] self.force_reraise() [ 708.232695] env[61898]: ERROR nova.compute.manager [instance: 
1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise self.value [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] updated_port = self._update_port( [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] _ensure_no_port_binding_failure(port) [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] raise exception.PortBindingFailed(port_id=port['id']) [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] nova.exception.PortBindingFailed: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. [ 708.233067] env[61898]: ERROR nova.compute.manager [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] [ 708.233067] env[61898]: DEBUG nova.compute.utils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 708.233949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.549s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.235479] env[61898]: INFO nova.compute.claims [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.239335] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Build of instance 1f70b6e1-b534-40a1-b262-e0a5ce3e425e was re-scheduled: Binding failed for port 003c6988-67d6-4d40-8682-5d823dcc867c, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 708.239335] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 708.239335] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.239335] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.239492] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 708.435695] env[61898]: INFO nova.compute.manager [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] [instance: 8ab18b24-91d4-4718-8f1a-d82f4226ba2a] Took 1.03 seconds to deallocate network for instance. [ 708.515231] env[61898]: DEBUG nova.compute.manager [req-162e9929-8915-410f-a6fd-918a7b71e5a0 req-963dffb8-cb6b-40d6-86f7-446f7773bc02 service nova] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Received event network-vif-deleted-41d5d6de-432b-43ed-a989-213d3b4114de {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 708.625435] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Releasing lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.625885] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 708.626088] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.626380] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0017062-117f-492c-853d-a0bacaf796a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.635641] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656dcfa7-ee79-4e71-be20-1591798671c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.657638] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ac73bda-db02-4427-9730-003561d078ca could not be found. [ 708.657832] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.658017] env[61898]: INFO nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Took 0.03 seconds to destroy the instance on the hypervisor. [ 708.658256] env[61898]: DEBUG oslo.service.loopingcall [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 708.658504] env[61898]: DEBUG nova.compute.manager [-] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 708.658622] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ac73bda-db02-4427-9730-003561d078ca] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.673990] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.759034] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.842377] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.176267] env[61898]: DEBUG nova.network.neutron [-] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.346531] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-1f70b6e1-b534-40a1-b262-e0a5ce3e425e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.346833] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 709.347011] env[61898]: DEBUG nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 709.347192] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 709.363095] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 709.462715] env[61898]: INFO nova.scheduler.client.report [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Deleted allocations for instance 8ab18b24-91d4-4718-8f1a-d82f4226ba2a [ 709.552643] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f87e3d9-cb42-4003-8b2a-c525e46e36bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.560694] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d6c47e-9814-4517-b8af-f559e177fa1f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.589946] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f589c38-a9f2-4e74-821f-95f89d520338 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.597547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8579a534-90a4-4df2-9ae7-f083e43a81a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.610430] env[61898]: DEBUG nova.compute.provider_tree [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.678693] env[61898]: INFO nova.compute.manager [-] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Took 1.02 seconds to deallocate network for instance. 
[ 709.683031] env[61898]: DEBUG nova.compute.claims [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 709.683124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.865644] env[61898]: DEBUG nova.network.neutron [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.972542] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1c0ca6c3-34da-46e7-92e8-8f103732b127 tempest-FloatingIPsAssociationNegativeTestJSON-322793126 tempest-FloatingIPsAssociationNegativeTestJSON-322793126-project-member] Lock "8ab18b24-91d4-4718-8f1a-d82f4226ba2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 146.048s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.113321] env[61898]: DEBUG nova.scheduler.client.report [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 710.368093] env[61898]: INFO nova.compute.manager [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 1f70b6e1-b534-40a1-b262-e0a5ce3e425e] Took 1.02 seconds to deallocate network for instance. [ 710.475426] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 710.626327] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.626327] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 710.627155] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.806s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.629446] env[61898]: INFO nova.compute.claims [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 711.000984] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.134171] env[61898]: DEBUG nova.compute.utils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.137907] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 711.137907] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.195336] env[61898]: DEBUG nova.policy [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '843b8d0d757d460c93afc7dd411c6d81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2104f7c38d6440d8b23dff9d0cef9abe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 711.409698] env[61898]: INFO nova.scheduler.client.report [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted allocations for instance 1f70b6e1-b534-40a1-b262-e0a5ce3e425e [ 711.524145] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Successfully created port: b2b0ebbb-4da8-43f9-b572-941ed83565aa {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.644133] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 711.919544] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7dc32a12-dde0-4fb0-9359-69db2c0d531a tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "1f70b6e1-b534-40a1-b262-e0a5ce3e425e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 144.366s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.956448] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5f3e57-8869-4fd2-b70c-46f1e75099fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.964886] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7d50f0-bd4f-49ec-a77e-14b81f131055 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.998985] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eae3176-a7f7-497e-b386-9505060653db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.006637] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4c9a53-fb63-4b1f-aa6a-2e10b8f624fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.022889] env[61898]: DEBUG nova.compute.provider_tree [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.390341] env[61898]: DEBUG nova.compute.manager [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Received event network-changed-b2b0ebbb-4da8-43f9-b572-941ed83565aa {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 712.390341] env[61898]: DEBUG nova.compute.manager [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Refreshing instance network info cache due to event network-changed-b2b0ebbb-4da8-43f9-b572-941ed83565aa. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 712.390341] env[61898]: DEBUG oslo_concurrency.lockutils [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] Acquiring lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.390341] env[61898]: DEBUG oslo_concurrency.lockutils [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] Acquired lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.390341] env[61898]: DEBUG nova.network.neutron [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Refreshing network info cache for port b2b0ebbb-4da8-43f9-b572-941ed83565aa {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.422978] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 712.526711] env[61898]: DEBUG nova.scheduler.client.report [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 712.652342] env[61898]: ERROR nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. 
[ 712.652342] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.652342] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.652342] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.652342] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.652342] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.652342] env[61898]: ERROR nova.compute.manager raise self.value [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.652342] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 712.652342] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.652342] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 712.653088] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.653088] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 712.653088] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. 
[ 712.653088] env[61898]: ERROR nova.compute.manager [ 712.653088] env[61898]: Traceback (most recent call last): [ 712.653088] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 712.653088] env[61898]: listener.cb(fileno) [ 712.653088] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.653088] env[61898]: result = function(*args, **kwargs) [ 712.653088] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 712.653088] env[61898]: return func(*args, **kwargs) [ 712.653088] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 712.653088] env[61898]: raise e [ 712.653088] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.653088] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 712.653088] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.653088] env[61898]: created_port_ids = self._update_ports_for_instance( [ 712.653088] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.653088] env[61898]: with excutils.save_and_reraise_exception(): [ 712.653088] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.653088] env[61898]: self.force_reraise() [ 712.653088] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.653088] env[61898]: raise self.value [ 712.653088] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.653088] env[61898]: updated_port = self._update_port( [ 712.653088] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.653088] env[61898]: _ensure_no_port_binding_failure(port) [ 712.653088] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.653088] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 712.653822] env[61898]: nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. [ 712.653822] env[61898]: Removing descriptor: 20 [ 712.664225] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 712.700230] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.700506] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.700650] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.700837] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.700981] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.701138] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.701373] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.701536] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.701765] env[61898]: DEBUG nova.virt.hardware [None 
req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.701939] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.702123] env[61898]: DEBUG nova.virt.hardware [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.703036] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63882b12-350e-4bdd-9b78-08ca0d5f0e67 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.712165] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-430420b5-e3a6-4101-8a43-1ec049663752 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.725851] env[61898]: ERROR nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. 
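The nova.virt.hardware lines above walk from the flavor (vcpus=1) and the 65536-wide socket/core/thread limits to a single possible topology of 1 socket, 1 core, 1 thread. A toy enumeration, written for illustration rather than taken from Nova, reproduces that result by factoring the vCPU count into sockets * cores * threads within the limits:

    # Toy illustration of the topology search described in the
    # nova.virt.hardware debug lines above; not Nova's implementation.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    # The m1.nano flavor in the log has 1 vCPU, so only (1, 1, 1) is possible.
    print(possible_topologies(1))   # [(1, 1, 1)]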
[ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Traceback (most recent call last): [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] yield resources [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.driver.spawn(context, instance, image_meta, [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] vm_ref = self.build_virtual_machine(instance, [ 712.725851] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] for vif in network_info: [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return self._sync_wrapper(fn, *args, **kwargs) [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.wait() [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self[:] = self._gt.wait() [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return self._exit_event.wait() [ 712.726225] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 712.726225] env[61898]: ERROR 
nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] current.throw(*self._exc) [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] result = function(*args, **kwargs) [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return func(*args, **kwargs) [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise e [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] nwinfo = self.network_api.allocate_for_instance( [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] created_port_ids = self._update_ports_for_instance( [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] with excutils.save_and_reraise_exception(): [ 712.726570] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.force_reraise() [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise self.value [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] updated_port = self._update_port( [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] _ensure_no_port_binding_failure(port) [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise exception.PortBindingFailed(port_id=port['id']) [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. [ 712.726935] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] [ 712.726935] env[61898]: INFO nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Terminating instance [ 712.915081] env[61898]: DEBUG nova.network.neutron [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.959383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.031758] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.032325] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 713.037514] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.424s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.095599] env[61898]: DEBUG nova.network.neutron [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.230593] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquiring lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.543347] env[61898]: DEBUG nova.compute.utils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 713.551875] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 713.553576] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.598059] env[61898]: DEBUG oslo_concurrency.lockutils [req-4cc19203-33ea-4e46-a313-8ed67c35ca87 req-cc47fa9e-9118-443c-b7a2-e429f1dabb1e service nova] Releasing lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.598499] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquired lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.598628] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.740759] env[61898]: DEBUG nova.policy [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f708122b70146109f7a193eb37764e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fa23dfea6c5417aab78bc6c830229e5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 713.844125] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e5974a-c977-404e-b690-f47d28447882 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.852943] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7b3f42-fd2f-4391-88ad-919a7ee0df55 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.885839] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54351f50-441b-4cf7-ad9c-ec0a7337408a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.892862] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b105118-c3d3-4fac-9543-066f6a4a13de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.906547] env[61898]: DEBUG nova.compute.provider_tree [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 
tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.052442] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 714.124569] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.211156] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.318980] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Successfully created port: 1cd401cb-5671-4baf-ac64-732214d54d44 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.410526] env[61898]: DEBUG nova.scheduler.client.report [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 714.428423] env[61898]: DEBUG nova.compute.manager [req-ec1d6d61-4640-4099-a567-4bf4510b332f req-003dd4b3-186d-4de0-bdec-474a7925ae2f service nova] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Received event network-vif-deleted-b2b0ebbb-4da8-43f9-b572-941ed83565aa {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 714.713363] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Releasing lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.717107] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] 
Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 714.717107] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.717107] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9676439f-2412-46bf-9b2e-23ccffd9634b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.726199] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976f821b-3d7b-415e-bbf4-ab8a8883e58e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.754281] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 10e3f3dd-165b-4049-8c1f-f561c91717c0 could not be found. [ 714.754523] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.754710] env[61898]: INFO nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Took 0.04 seconds to destroy the instance on the hypervisor. [ 714.754952] env[61898]: DEBUG oslo.service.loopingcall [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.755187] env[61898]: DEBUG nova.compute.manager [-] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 714.755337] env[61898]: DEBUG nova.network.neutron [-] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 714.792169] env[61898]: DEBUG nova.network.neutron [-] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.916391] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.879s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.917096] env[61898]: ERROR nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Traceback (most recent call last): [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.driver.spawn(context, instance, image_meta, [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] vm_ref = self.build_virtual_machine(instance, [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.917096] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] for vif in network_info: [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return self._sync_wrapper(fn, *args, **kwargs) [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.wait() [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait 
[ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self[:] = self._gt.wait() [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return self._exit_event.wait() [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] current.throw(*self._exc) [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.917494] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] result = function(*args, **kwargs) [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] return func(*args, **kwargs) [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise e [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] nwinfo = self.network_api.allocate_for_instance( [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] created_port_ids = self._update_ports_for_instance( [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] with excutils.save_and_reraise_exception(): [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] self.force_reraise() [ 714.918141] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise self.value [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 
1389, in _update_ports_for_instance [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] updated_port = self._update_port( [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] _ensure_no_port_binding_failure(port) [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] raise exception.PortBindingFailed(port_id=port['id']) [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] nova.exception.PortBindingFailed: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. [ 714.918623] env[61898]: ERROR nova.compute.manager [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] [ 714.918623] env[61898]: DEBUG nova.compute.utils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 714.920712] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.407s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.920945] env[61898]: DEBUG nova.objects.instance [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lazy-loading 'resources' on Instance uuid 81fd9ccc-a267-498d-93d4-8adf894ee8d8 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 714.923577] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Build of instance 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a was re-scheduled: Binding failed for port f2ea5526-d249-482e-9ddf-7aa51259834a, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 714.924232] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 714.924456] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquiring lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.925257] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Acquired lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.925428] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.063528] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 715.099712] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 715.099712] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.100078] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.100078] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.100145] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.100251] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.100449] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 715.100635] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 715.100806] 
env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.100966] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.101157] env[61898]: DEBUG nova.virt.hardware [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.102073] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92165b55-c3ba-45f9-9f12-df9c5648235a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.111991] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da3e3f1-5b78-4468-acb4-627b127217f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.224435] env[61898]: ERROR nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. 
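Each of these failures ends with "please check neutron logs for more information"; the port's binding attributes on the Neutron side are usually the first thing to inspect. A hedged operator-side sketch of that check using openstacksdk (the cloud name and credentials are assumptions, not from the log):

    # Hedged sketch, assuming openstacksdk is installed and a cloud named
    # "devstack" is configured in clouds.yaml; not taken from the log.
    import openstack

    conn = openstack.connect(cloud="devstack")
    port = conn.network.get_port("1cd401cb-5671-4baf-ac64-732214d54d44")
    # A vif_type of "binding_failed" is what the compute side turns into
    # PortBindingFailed; the binding host shows where binding was attempted.
    print(port.binding_vif_type, port.binding_host_id)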
[ 715.224435] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.224435] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.224435] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.224435] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.224435] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.224435] env[61898]: ERROR nova.compute.manager raise self.value [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.224435] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 715.224435] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.224435] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 715.224886] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.224886] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 715.224886] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. 
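The same failure is recorded again below from the driver's spawn path, because network allocation runs on an eventlet green thread: the exception is logged once inside _allocate_network_async, then resurfaces when the network info is first iterated and the code waits on that thread (the wait() / _gt.wait() frames). A hedged sketch of that deferred-wait pattern, assuming eventlet is installed; the wrapper and names are illustrative, not Nova's classes:

    # Hedged sketch of the async allocate-then-wait pattern visible in the
    # surrounding tracebacks; AsyncNetworkInfo and allocate() are stand-ins.
    import eventlet

    class AsyncNetworkInfo:
        def __init__(self, allocate_fn, *args):
            # Allocation starts immediately on a green thread.
            self._gt = eventlet.spawn(allocate_fn, *args)

        def wait(self):
            # Any exception raised by the green thread is re-raised here,
            # which is why the spawn-side traceback goes through wait().
            return self._gt.wait()

        def __iter__(self):
            return iter(self.wait())

    def allocate(port_id):
        raise RuntimeError(f"Binding failed for port {port_id}")

    nw_info = AsyncNetworkInfo(allocate, "1cd401cb-5671-4baf-ac64-732214d54d44")
    try:
        for vif in nw_info:        # first access forces the wait
            print(vif)
    except RuntimeError as exc:
        print(exc)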
[ 715.224886] env[61898]: ERROR nova.compute.manager [ 715.224886] env[61898]: Traceback (most recent call last): [ 715.224886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 715.224886] env[61898]: listener.cb(fileno) [ 715.224886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.224886] env[61898]: result = function(*args, **kwargs) [ 715.224886] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.224886] env[61898]: return func(*args, **kwargs) [ 715.224886] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 715.224886] env[61898]: raise e [ 715.224886] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.224886] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 715.224886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.224886] env[61898]: created_port_ids = self._update_ports_for_instance( [ 715.224886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.224886] env[61898]: with excutils.save_and_reraise_exception(): [ 715.224886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.224886] env[61898]: self.force_reraise() [ 715.224886] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.224886] env[61898]: raise self.value [ 715.224886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.224886] env[61898]: updated_port = self._update_port( [ 715.224886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.224886] env[61898]: _ensure_no_port_binding_failure(port) [ 715.224886] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.224886] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 715.225766] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. [ 715.225766] env[61898]: Removing descriptor: 20 [ 715.225766] env[61898]: ERROR nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. 
[ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Traceback (most recent call last): [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] yield resources [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.driver.spawn(context, instance, image_meta, [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.225766] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] vm_ref = self.build_virtual_machine(instance, [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] for vif in network_info: [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self._sync_wrapper(fn, *args, **kwargs) [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.wait() [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self[:] = self._gt.wait() [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self._exit_event.wait() [ 715.226107] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.226440] env[61898]: ERROR 
nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] result = hub.switch() [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self.greenlet.switch() [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] result = function(*args, **kwargs) [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return func(*args, **kwargs) [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise e [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] nwinfo = self.network_api.allocate_for_instance( [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 715.226440] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] created_port_ids = self._update_ports_for_instance( [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] with excutils.save_and_reraise_exception(): [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.force_reraise() [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise self.value [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] updated_port = self._update_port( [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.226776] 
env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] _ensure_no_port_binding_failure(port) [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.226776] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise exception.PortBindingFailed(port_id=port['id']) [ 715.227091] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. [ 715.227091] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] [ 715.227091] env[61898]: INFO nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Terminating instance [ 715.294975] env[61898]: DEBUG nova.network.neutron [-] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.446462] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.556915] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.655030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.655030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.730362] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquiring lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.730668] env[61898]: DEBUG oslo_concurrency.lockutils 
[None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquired lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.730759] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.797870] env[61898]: INFO nova.compute.manager [-] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Took 1.04 seconds to deallocate network for instance. [ 715.801024] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e95b32-7752-48b3-a531-fe6c8f33f073 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.803920] env[61898]: DEBUG nova.compute.claims [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 715.804108] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.809123] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f43a72-ba8b-4d7c-aa3d-ec7a676ae9fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.839256] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73154b14-51a8-4d2f-b1fb-591b8f80cfb9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.846564] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e85c9a-4ee1-411d-8511-c57740c1628c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.859235] env[61898]: DEBUG nova.compute.provider_tree [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.059970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Releasing lock "refresh_cache-72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.060414] env[61898]: DEBUG nova.compute.manager [None 
req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 716.060613] env[61898]: DEBUG nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 716.060834] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.077688] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.255931] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 716.347366] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.362600] env[61898]: DEBUG nova.scheduler.client.report [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 716.452868] env[61898]: DEBUG nova.compute.manager [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Received event network-changed-1cd401cb-5671-4baf-ac64-732214d54d44 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 716.453098] env[61898]: DEBUG nova.compute.manager [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Refreshing instance network info cache due to event network-changed-1cd401cb-5671-4baf-ac64-732214d54d44. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 716.453290] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] Acquiring lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.581594] env[61898]: DEBUG nova.network.neutron [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.681094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "52a584e1-61ae-447d-90e0-e15d32a96314" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.681327] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.849517] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Releasing lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.849964] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 716.850173] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 716.850530] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] Acquired lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.850722] env[61898]: DEBUG nova.network.neutron [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Refreshing network info cache for port 1cd401cb-5671-4baf-ac64-732214d54d44 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.852485] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eafa2f77-8436-4401-879d-73a50be7429e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.861276] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ed2e99-a5b2-49e0-a4cf-a0e05454e641 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.872279] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.874465] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.673s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.875966] env[61898]: INFO nova.compute.claims [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.890275] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03ba4dad-5c58-4582-a36e-95de69b37474 could not be found. 
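The lock traffic in the records above comes from oslo.concurrency's lockutils: that module emits the "Acquiring lock", "acquired ... waited N s" and "released ... held N s" lines, whether the lock is the per-instance "refresh_cache-<uuid>" lock or the resource tracker's "compute_resources" lock (acquired here by instance_claim after waiting 16.673s and released by update_usage after holding it 1.952s). A minimal sketch of the same pattern, with placeholder bodies; the lock names mirror the log, everything else is illustrative:

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the per-instance network cache.
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder: rebuild the cache while the lock is held

    # Decorator form, serializing everything that touches "compute_resources".
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        return True  # placeholder: claim CPU/RAM/disk for the instance

Both forms are in-process by default; lockutils itself logs the acquire/release timing messages seen throughout this log.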
[ 716.890590] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.890825] env[61898]: INFO nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Took 0.04 seconds to destroy the instance on the hypervisor. [ 716.891113] env[61898]: DEBUG oslo.service.loopingcall [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.891375] env[61898]: DEBUG nova.compute.manager [-] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 716.891542] env[61898]: DEBUG nova.network.neutron [-] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.896601] env[61898]: INFO nova.scheduler.client.report [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Deleted allocations for instance 81fd9ccc-a267-498d-93d4-8adf894ee8d8 [ 716.908088] env[61898]: DEBUG nova.network.neutron [-] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.085020] env[61898]: INFO nova.compute.manager [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] [instance: 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a] Took 1.02 seconds to deallocate network for instance. [ 717.373157] env[61898]: DEBUG nova.network.neutron [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.404630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2acceceb-4625-4dd7-b64d-c307f7c796f4 tempest-ServersAaction247Test-1759629850 tempest-ServersAaction247Test-1759629850-project-member] Lock "81fd9ccc-a267-498d-93d4-8adf894ee8d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.102s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.410356] env[61898]: DEBUG nova.network.neutron [-] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.455086] env[61898]: DEBUG nova.network.neutron [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.912937] env[61898]: INFO nova.compute.manager [-] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Took 1.02 seconds to deallocate network for instance. [ 717.915424] env[61898]: DEBUG nova.compute.claims [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 717.915599] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.958112] env[61898]: DEBUG oslo_concurrency.lockutils [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] Releasing lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.958777] env[61898]: DEBUG nova.compute.manager [req-f4f9ef1f-cc88-4a21-a17e-c317954fdaea req-a3f4b069-624e-4c38-8dff-a2845fb8fd6a service nova] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Received event network-vif-deleted-1cd401cb-5671-4baf-ac64-732214d54d44 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 718.112155] env[61898]: INFO nova.scheduler.client.report [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Deleted allocations for instance 72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a [ 718.174039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71a96a7-8753-4a05-9bd3-c2285d32f0c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.183632] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deceeac-602d-4f9b-bbdb-97fb1fdb9f2a {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.216503] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fcedea-aa6f-4046-b87d-4f08b85592bb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.224524] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bb05b9-6dcf-47be-beee-78111a3cbed8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.238482] env[61898]: DEBUG nova.compute.provider_tree [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.623851] env[61898]: DEBUG oslo_concurrency.lockutils [None req-aa91cf7d-d2a9-4468-b2e0-c8f8c9b1ea95 tempest-ServersWithSpecificFlavorTestJSON-1288445366 tempest-ServersWithSpecificFlavorTestJSON-1288445366-project-member] Lock "72ed5e07-7e7f-4fe9-b8c8-9ad7055abf0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.734s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.742537] env[61898]: DEBUG nova.scheduler.client.report [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 719.125798] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 719.248415] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.374s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.249053] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 719.255284] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.890s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.255284] env[61898]: INFO nova.compute.claims [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 719.649339] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.759256] env[61898]: DEBUG nova.compute.utils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 719.760828] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 719.761030] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.813650] env[61898]: DEBUG nova.policy [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.149677] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Successfully created port: e8a2ebbb-f081-4c68-afcf-26bd33033f35 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.268146] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 720.600089] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af84bde3-d00c-4b0b-b036-1268cfb87e2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.610358] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cda382-caf5-4fa3-acc3-c360e6e91aa4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.645953] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73031d72-51b5-4df0-8675-377057a991a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.653258] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a05d63-b4dd-4971-96af-6a325b3724a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.666741] env[61898]: DEBUG nova.compute.provider_tree [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.044077] env[61898]: DEBUG nova.compute.manager [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Received event network-changed-e8a2ebbb-f081-4c68-afcf-26bd33033f35 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 721.044288] env[61898]: DEBUG nova.compute.manager [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Refreshing instance network info cache due to event network-changed-e8a2ebbb-f081-4c68-afcf-26bd33033f35. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 721.044508] env[61898]: DEBUG oslo_concurrency.lockutils [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] Acquiring lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.044651] env[61898]: DEBUG oslo_concurrency.lockutils [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] Acquired lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.044813] env[61898]: DEBUG nova.network.neutron [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Refreshing network info cache for port e8a2ebbb-f081-4c68-afcf-26bd33033f35 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 721.172019] env[61898]: DEBUG nova.scheduler.client.report [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 721.286559] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 721.295421] env[61898]: ERROR nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. 
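The ERROR record above reports the binding failure for port e8a2ebbb-f081-4c68-afcf-26bd33033f35; the tracebacks that follow walk from allocate_for_instance down to _ensure_no_port_binding_failure, which is where PortBindingFailed is raised. As context, a minimal stand-alone sketch of that kind of check; it assumes the usual Neutron convention that a port whose binding could not be completed comes back with binding:vif_type set to 'binding_failed' (the exception message mirrors the log, the rest is illustrative):

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check '
                             'neutron logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a port it could not bind on the target
        # host with binding:vif_type = 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'e8a2ebbb-f081-4c68-afcf-26bd33033f35',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)

As the message itself says, the reason the binding failed is only visible on the Neutron side, so the neutron server logs for this port are the next place to look.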
[ 721.295421] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.295421] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.295421] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.295421] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.295421] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.295421] env[61898]: ERROR nova.compute.manager raise self.value [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.295421] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 721.295421] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.295421] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 721.295872] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.295872] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 721.295872] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. 
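Several frames in the traceback above pass through oslo_utils.excutils.save_and_reraise_exception (the __exit__, force_reraise and raise self.value lines): _update_ports_for_instance runs its cleanup inside that context manager and the original PortBindingFailed is re-raised unchanged when the block exits. A small self-contained sketch of the pattern, with hypothetical bind_port/rollback helpers standing in for the real work:

    from oslo_utils import excutils

    def bind_port(port):
        # Hypothetical stand-in: pretend every binding attempt fails.
        raise RuntimeError('binding failed for %s' % port)

    def rollback(created):
        # Hypothetical cleanup; Nova would delete the ports it had created.
        print('rolling back %d port(s)' % len(created))

    def update_ports(ports):
        created = []
        for port in ports:
            try:
                created.append(bind_port(port))
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; on leaving the block the saved
                    # exception is re-raised, which is the force_reraise() /
                    # raise self.value pair visible in the frames above.
                    rollback(created)
        return created

    try:
        update_ports(['port-a'])
    except RuntimeError as exc:
        print('original exception preserved:', exc)

The context object also exposes a reraise flag for cases where the exception should be swallowed instead, but the allocation path above deliberately lets it propagate.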
[ 721.295872] env[61898]: ERROR nova.compute.manager [ 721.295872] env[61898]: Traceback (most recent call last): [ 721.295872] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 721.295872] env[61898]: listener.cb(fileno) [ 721.295872] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.295872] env[61898]: result = function(*args, **kwargs) [ 721.295872] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.295872] env[61898]: return func(*args, **kwargs) [ 721.295872] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.295872] env[61898]: raise e [ 721.295872] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.295872] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 721.295872] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.295872] env[61898]: created_port_ids = self._update_ports_for_instance( [ 721.295872] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.295872] env[61898]: with excutils.save_and_reraise_exception(): [ 721.295872] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.295872] env[61898]: self.force_reraise() [ 721.295872] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.295872] env[61898]: raise self.value [ 721.295872] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.295872] env[61898]: updated_port = self._update_port( [ 721.295872] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.295872] env[61898]: _ensure_no_port_binding_failure(port) [ 721.295872] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.295872] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 721.296755] env[61898]: nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. 
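The bare traceback above is the copy raised inside the eventlet greenthread that ran _allocate_network_async; the per-instance tracebacks in this log show the same exception surfacing again when the driver iterates network_info (network/model.py __iter__ -> _sync_wrapper -> wait -> self._gt.wait()). In other words, the allocation is kicked off in the background and its failure is only delivered when the result is first needed. A tiny sketch of that behaviour with eventlet, using a hypothetical allocate function; GreenThread.wait() re-raises whatever the spawned function raised:

    import eventlet

    def allocate_network(instance_uuid):
        # Hypothetical stand-in that fails the way the log does.
        raise RuntimeError('port binding failed for %s' % instance_uuid)

    # Start the allocation in the background, as _allocate_network_async is.
    gt = eventlet.spawn(allocate_network, 'instance-uuid')

    # ... the build carries on; the error only appears once the result is used ...
    try:
        network_info = gt.wait()
    except RuntimeError as exc:
        print('surfaced at wait():', exc)

That deferral is why the same PortBindingFailed is reported once from the greenthread and once from the build/spawn path: both ends of the deferred call see it.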
[ 721.296755] env[61898]: Removing descriptor: 20 [ 721.309597] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.309881] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.310066] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.310253] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.310562] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.310635] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.310787] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.310944] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 721.311124] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc 
tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.311284] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.311464] env[61898]: DEBUG nova.virt.hardware [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.312313] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69d8c09-363b-499c-8f93-c0af0207acca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.320122] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8654a0-398c-47a3-be7e-bb6d33a85574 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.333691] env[61898]: ERROR nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. 
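Earlier in this stretch, nova.virt.hardware derives the guest CPU topology for the m1.nano flavor: 1 vCPU, no hw:cpu_* limits in the flavor or image, so the sockets/cores/threads limits default to 65536 and exactly one candidate survives, VirtCPUTopology(cores=1, sockets=1, threads=1). (The spawn failure reported just above is dumped in full in the records that follow.) As a rough illustration rather than Nova's actual implementation, enumerating the factorizations of the vCPU count under those limits reproduces the "Got 1 possible topologies" result:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Every sockets*cores*threads factorization of vcpus within limits."""
        span = lambda cap: range(1, min(vcpus, cap) + 1)
        return [(s, c, t)
                for s, c, t in product(span(max_sockets), span(max_cores),
                                       span(max_threads))
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"
    print(possible_topologies(4))   # six candidates, e.g. (1, 2, 2) and (4, 1, 1)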
[ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Traceback (most recent call last): [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] yield resources [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.driver.spawn(context, instance, image_meta, [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] vm_ref = self.build_virtual_machine(instance, [ 721.333691] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] for vif in network_info: [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return self._sync_wrapper(fn, *args, **kwargs) [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.wait() [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self[:] = self._gt.wait() [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return self._exit_event.wait() [ 721.334187] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.334187] env[61898]: ERROR 
nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] current.throw(*self._exc) [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] result = function(*args, **kwargs) [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return func(*args, **kwargs) [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise e [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] nwinfo = self.network_api.allocate_for_instance( [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] created_port_ids = self._update_ports_for_instance( [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] with excutils.save_and_reraise_exception(): [ 721.334524] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.force_reraise() [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise self.value [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] updated_port = self._update_port( [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] _ensure_no_port_binding_failure(port) [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise exception.PortBindingFailed(port_id=port['id']) [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. [ 721.334893] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] [ 721.334893] env[61898]: INFO nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Terminating instance [ 721.566889] env[61898]: DEBUG nova.network.neutron [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.641044] env[61898]: DEBUG nova.network.neutron [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.677317] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.677718] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 721.683386] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.554s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.684894] env[61898]: INFO nova.compute.claims [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.840345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.145078] env[61898]: DEBUG oslo_concurrency.lockutils [req-d73148c0-a12f-4efc-b78d-395ca5784731 req-c47397af-c9b1-4dab-b86a-d57f0a4a1d89 service nova] Releasing lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.145468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.145661] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 722.195533] env[61898]: DEBUG nova.compute.utils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 722.199015] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 722.199187] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 722.242234] env[61898]: DEBUG nova.policy [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fa69b1429ba541b4a032d4c2da1df85e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db7e7476a70d40258da71868c79845c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 722.538456] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Successfully created port: 9b1609cf-96a3-4538-a166-e3e53aff506f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.663713] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 722.707302] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 722.788355] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.054012] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfecaf3e-50b8-46bf-b0e1-1a7765afda1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.066399] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d8270b-c0eb-4ef3-83e8-d5a895ba602d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.071963] env[61898]: DEBUG nova.compute.manager [req-8941f1f8-d0d2-4f81-892d-9ad77afaaf2f req-d2bc7cc8-70c4-46c9-9479-ee5b6dd8f9e8 service nova] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Received event network-vif-deleted-e8a2ebbb-f081-4c68-afcf-26bd33033f35 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 723.101785] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf89b678-095b-4a14-91ac-af140273af2c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.110292] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad07b1d-a4af-4db9-a0a5-651e877361d0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.126638] env[61898]: DEBUG nova.compute.provider_tree [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.212223] env[61898]: INFO nova.virt.block_device [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Booting with volume 1c404c68-77c8-47ea-857f-5a81d3a1115a at /dev/sda [ 723.261143] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a7da2452-7668-4a9a-894b-2ec743129991 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.273392] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d085afc2-2958-4cb0-bf52-6bb1b5ca6464 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.295847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
723.296319] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 723.296550] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.296999] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b81be73-6f67-42f5-8a51-1d196c767d07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.298993] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db430067-bc0f-4c42-a83c-36724685b341 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.307929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aa7d23-c0f9-456d-a94a-31a88c14cbda {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.320211] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbfc83f-eb60-4c8c-9047-34b04a5f842e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.345270] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39a44a1-1106-429e-8a8a-77e559b448e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.348032] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 45138019-b69e-459b-99cf-47a47aa58e40 could not be found. [ 723.349053] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.349053] env[61898]: INFO nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Took 0.05 seconds to destroy the instance on the hypervisor. [ 723.349053] env[61898]: DEBUG oslo.service.loopingcall [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.349053] env[61898]: DEBUG nova.compute.manager [-] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 723.349053] env[61898]: DEBUG nova.network.neutron [-] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.354309] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11058862-a454-4ec3-a6f9-f958ae17533f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.367859] env[61898]: DEBUG nova.virt.block_device [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating existing volume attachment record: cf51506f-5c1b-4c29-9384-58aae11a9892 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 723.370285] env[61898]: DEBUG nova.network.neutron [-] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 723.543351] env[61898]: ERROR nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. 
[ 723.543351] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 723.543351] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.543351] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.543351] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.543351] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.543351] env[61898]: ERROR nova.compute.manager raise self.value [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.543351] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 723.543351] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.543351] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 723.543865] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.543865] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 723.543865] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. 
[ 723.543865] env[61898]: ERROR nova.compute.manager [ 723.543865] env[61898]: Traceback (most recent call last): [ 723.543865] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 723.543865] env[61898]: listener.cb(fileno) [ 723.543865] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.543865] env[61898]: result = function(*args, **kwargs) [ 723.543865] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 723.543865] env[61898]: return func(*args, **kwargs) [ 723.543865] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 723.543865] env[61898]: raise e [ 723.543865] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 723.543865] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 723.543865] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 723.543865] env[61898]: created_port_ids = self._update_ports_for_instance( [ 723.543865] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 723.543865] env[61898]: with excutils.save_and_reraise_exception(): [ 723.543865] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.543865] env[61898]: self.force_reraise() [ 723.543865] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.543865] env[61898]: raise self.value [ 723.543865] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 723.543865] env[61898]: updated_port = self._update_port( [ 723.543865] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.543865] env[61898]: _ensure_no_port_binding_failure(port) [ 723.543865] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.543865] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 723.544697] env[61898]: nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. 
[ 723.544697] env[61898]: Removing descriptor: 20 [ 723.630731] env[61898]: DEBUG nova.scheduler.client.report [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 723.875273] env[61898]: DEBUG nova.network.neutron [-] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.137279] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.455s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.137279] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 724.140138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.317s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.378133] env[61898]: INFO nova.compute.manager [-] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Took 1.03 seconds to deallocate network for instance. 
[ 724.380644] env[61898]: DEBUG nova.compute.claims [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.380843] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.648142] env[61898]: DEBUG nova.compute.utils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.649631] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 724.649798] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.694709] env[61898]: DEBUG nova.policy [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce1ce30063d34fc7b45112ade77e9a7d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8dbcc96f78ea4094a56be3767caf5e60', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 724.955176] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12bf232-f37b-40a8-a0c7-5d7e48335c20 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.964161] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69a9420-bdfe-4bd2-91d3-31747698b4bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.997288] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Successfully created port: f990e522-d7e8-455a-b83a-fda1e1af7890 {{(pid=61898) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 725.003711] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417f775a-b22e-4810-9d36-8ba0e5e84bd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.015564] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4354f208-dcaa-4587-8a14-7072420e36aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.030686] env[61898]: DEBUG nova.compute.provider_tree [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.135659] env[61898]: DEBUG nova.compute.manager [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Received event network-changed-9b1609cf-96a3-4538-a166-e3e53aff506f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 725.135869] env[61898]: DEBUG nova.compute.manager [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Refreshing instance network info cache due to event network-changed-9b1609cf-96a3-4538-a166-e3e53aff506f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 725.136097] env[61898]: DEBUG oslo_concurrency.lockutils [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] Acquiring lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.136264] env[61898]: DEBUG oslo_concurrency.lockutils [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] Acquired lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.136462] env[61898]: DEBUG nova.network.neutron [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Refreshing network info cache for port 9b1609cf-96a3-4538-a166-e3e53aff506f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 725.153327] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 725.510842] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 725.511398] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.511627] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.511794] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.511976] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 725.512132] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.512288] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.512491] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.512647] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.512972] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Got 1 possible 
topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.513307] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.513524] env[61898]: DEBUG nova.virt.hardware [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.514428] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41131423-72fa-4b8d-8af5-048b11090fc6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.522840] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa52623-3463-47ba-8d62-ae8a84727609 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.536368] env[61898]: DEBUG nova.scheduler.client.report [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 725.539880] env[61898]: ERROR nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. 
[ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Traceback (most recent call last): [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] yield resources [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.driver.spawn(context, instance, image_meta, [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] vm_ref = self.build_virtual_machine(instance, [ 725.539880] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] for vif in network_info: [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return self._sync_wrapper(fn, *args, **kwargs) [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.wait() [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self[:] = self._gt.wait() [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return self._exit_event.wait() [ 725.540287] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 725.540287] env[61898]: ERROR 
nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] current.throw(*self._exc) [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] result = function(*args, **kwargs) [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return func(*args, **kwargs) [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise e [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] nwinfo = self.network_api.allocate_for_instance( [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] created_port_ids = self._update_ports_for_instance( [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] with excutils.save_and_reraise_exception(): [ 725.540761] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.force_reraise() [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise self.value [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] updated_port = self._update_port( [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] _ensure_no_port_binding_failure(port) [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise exception.PortBindingFailed(port_id=port['id']) [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. [ 725.541190] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] [ 725.541190] env[61898]: INFO nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Terminating instance [ 725.656469] env[61898]: DEBUG nova.network.neutron [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 725.811979] env[61898]: DEBUG nova.network.neutron [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.867456] env[61898]: ERROR nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. 
[ 725.867456] env[61898]: ERROR nova.compute.manager Traceback (most recent call last): [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 725.867456] env[61898]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.867456] env[61898]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.867456] env[61898]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.867456] env[61898]: ERROR nova.compute.manager self.force_reraise() [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.867456] env[61898]: ERROR nova.compute.manager raise self.value [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.867456] env[61898]: ERROR nova.compute.manager updated_port = self._update_port( [ 725.867456] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.867456] env[61898]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 725.868040] env[61898]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.868040] env[61898]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 725.868040] env[61898]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. 
[ 725.868040] env[61898]: ERROR nova.compute.manager [ 725.868040] env[61898]: Traceback (most recent call last): [ 725.868040] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 725.868040] env[61898]: listener.cb(fileno) [ 725.868040] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 725.868040] env[61898]: result = function(*args, **kwargs) [ 725.868040] env[61898]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 725.868040] env[61898]: return func(*args, **kwargs) [ 725.868040] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 725.868040] env[61898]: raise e [ 725.868040] env[61898]: File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 725.868040] env[61898]: nwinfo = self.network_api.allocate_for_instance( [ 725.868040] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 725.868040] env[61898]: created_port_ids = self._update_ports_for_instance( [ 725.868040] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 725.868040] env[61898]: with excutils.save_and_reraise_exception(): [ 725.868040] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.868040] env[61898]: self.force_reraise() [ 725.868040] env[61898]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.868040] env[61898]: raise self.value [ 725.868040] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 725.868040] env[61898]: updated_port = self._update_port( [ 725.868040] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.868040] env[61898]: _ensure_no_port_binding_failure(port) [ 725.868040] env[61898]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.868040] env[61898]: raise exception.PortBindingFailed(port_id=port['id']) [ 725.868932] env[61898]: nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. 
[ 725.868932] env[61898]: Removing descriptor: 20 [ 726.047681] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquiring lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.048435] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.908s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.049051] env[61898]: ERROR nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Traceback (most recent call last): [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.driver.spawn(context, instance, image_meta, [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] vm_ref = self.build_virtual_machine(instance, [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] vif_infos = vmwarevif.get_vif_info(self._session, [ 726.049051] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] for vif in network_info: [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self._sync_wrapper(fn, *args, **kwargs) [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", 
line 603, in _sync_wrapper [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.wait() [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self[:] = self._gt.wait() [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self._exit_event.wait() [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] result = hub.switch() [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 726.049661] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return self.greenlet.switch() [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] result = function(*args, **kwargs) [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] return func(*args, **kwargs) [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise e [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] nwinfo = self.network_api.allocate_for_instance( [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] created_port_ids = self._update_ports_for_instance( [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] with excutils.save_and_reraise_exception(): [ 726.050232] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] self.force_reraise() [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise self.value [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] updated_port = self._update_port( [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] _ensure_no_port_binding_failure(port) [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] raise exception.PortBindingFailed(port_id=port['id']) [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] nova.exception.PortBindingFailed: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. [ 726.050860] env[61898]: ERROR nova.compute.manager [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] [ 726.051387] env[61898]: DEBUG nova.compute.utils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 726.051387] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.368s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.055843] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Build of instance 4a6adf12-7106-46ce-abb0-fe8c5c212905 was re-scheduled: Binding failed for port fd6cab6f-8a77-4c20-8cea-570aca74aacd, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 726.056296] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 726.056514] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.056658] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquired lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.056813] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.162971] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 726.189361] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 726.189606] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 726.189807] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.190011] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 726.190170] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.190316] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 726.190569] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 726.190743] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 726.190909] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 726.191617] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 726.191842] env[61898]: DEBUG nova.virt.hardware [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 726.192704] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8918cb-c8e5-4a18-b34e-c79fae8f46f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.201365] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3532dcc6-80b4-448c-be24-f01493651feb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.215394] env[61898]: ERROR nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. 
[ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] Traceback (most recent call last): [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 2900, in _build_resources [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] yield resources [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.driver.spawn(context, instance, image_meta, [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] vm_ref = self.build_virtual_machine(instance, [ 726.215394] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] for vif in network_info: [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return self._sync_wrapper(fn, *args, **kwargs) [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.wait() [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self[:] = self._gt.wait() [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return self._exit_event.wait() [ 726.215833] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 726.215833] env[61898]: ERROR 
nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] current.throw(*self._exc) [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] result = function(*args, **kwargs) [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return func(*args, **kwargs) [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise e [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] nwinfo = self.network_api.allocate_for_instance( [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] created_port_ids = self._update_ports_for_instance( [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] with excutils.save_and_reraise_exception(): [ 726.216254] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.force_reraise() [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise self.value [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] updated_port = self._update_port( [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] _ensure_no_port_binding_failure(port) [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise exception.PortBindingFailed(port_id=port['id']) [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. [ 726.216685] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] [ 726.216685] env[61898]: INFO nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Terminating instance [ 726.317089] env[61898]: DEBUG oslo_concurrency.lockutils [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] Releasing lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.317445] env[61898]: DEBUG nova.compute.manager [req-3e145dbc-2a6a-4178-bfb3-ac22f5f9fc45 req-3fc77686-57d0-42d8-a978-d45b4e8d0073 service nova] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Received event network-vif-deleted-9b1609cf-96a3-4538-a166-e3e53aff506f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 726.317839] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquired lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.318043] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.578399] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.654563] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.720200] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquiring lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.720376] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquired lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.720621] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 726.832914] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbbdc66-3fb9-415e-a8c5-0e6f40b83649 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.840052] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.842670] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2876b74a-2d7b-44be-979e-ef7040860805 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.878432] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24545f5-4083-4a89-9832-b29f0167fb06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.886322] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b17dcce-a71e-40ec-a0b7-86a4f6437c06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.904097] env[61898]: DEBUG nova.compute.provider_tree [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.975133] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.156851] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Releasing lock "refresh_cache-4a6adf12-7106-46ce-abb0-fe8c5c212905" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.157206] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 727.157388] env[61898]: DEBUG nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 727.157553] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.173291] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.211331] env[61898]: DEBUG nova.compute.manager [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Received event network-changed-f990e522-d7e8-455a-b83a-fda1e1af7890 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 727.211331] env[61898]: DEBUG nova.compute.manager [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Refreshing instance network info cache due to event network-changed-f990e522-d7e8-455a-b83a-fda1e1af7890. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 727.211331] env[61898]: DEBUG oslo_concurrency.lockutils [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] Acquiring lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.238686] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.339388] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.409502] env[61898]: DEBUG nova.scheduler.client.report [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 727.478926] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Releasing lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.479459] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 727.479804] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e490bfc0-9f7b-42f2-a710-85b6d9f131ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.489116] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19153c5b-abbd-47f5-8879-47f7f0611344 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.512785] env[61898]: WARNING nova.virt.vmwareapi.driver [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3 could not be found. [ 727.512785] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.512785] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44a79528-af27-4eba-b8ff-0780a3ff568a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.519121] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc7a761-17b7-452a-92e4-15b7eb58766f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.540687] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3 could not be found. [ 727.540999] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.541231] env[61898]: INFO nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Took 0.06 seconds to destroy the instance on the hypervisor. [ 727.541511] env[61898]: DEBUG oslo.service.loopingcall [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.541767] env[61898]: DEBUG nova.compute.manager [-] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 727.541871] env[61898]: DEBUG nova.network.neutron [-] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.562700] env[61898]: DEBUG nova.network.neutron [-] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.675838] env[61898]: DEBUG nova.network.neutron [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.843926] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Releasing lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.843926] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 727.843926] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.843926] env[61898]: DEBUG oslo_concurrency.lockutils [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] Acquired lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.843926] env[61898]: DEBUG nova.network.neutron [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Refreshing network info cache for port f990e522-d7e8-455a-b83a-fda1e1af7890 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.844461] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-720707ab-08a6-423d-9012-f47e26dbc354 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.856137] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192f8505-a21b-4062-b7b3-cbb2f586b214 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.877726] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2887126b-6db5-4578-a063-552e774542cc could not be found. [ 727.877964] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 727.878158] env[61898]: INFO nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 727.878395] env[61898]: DEBUG oslo.service.loopingcall [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.878614] env[61898]: DEBUG nova.compute.manager [-] [instance: 2887126b-6db5-4578-a063-552e774542cc] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 727.878710] env[61898]: DEBUG nova.network.neutron [-] [instance: 2887126b-6db5-4578-a063-552e774542cc] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 727.893524] env[61898]: DEBUG nova.network.neutron [-] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.914880] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.863s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.915511] env[61898]: ERROR nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] Traceback (most recent call last): [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.driver.spawn(context, instance, image_meta, [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] vm_ref = self.build_virtual_machine(instance, [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 727.915511] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] for vif in network_info: [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File 
"/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self._sync_wrapper(fn, *args, **kwargs) [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.wait() [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self[:] = self._gt.wait() [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self._exit_event.wait() [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] result = hub.switch() [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 727.915853] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return self.greenlet.switch() [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] result = function(*args, **kwargs) [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] return func(*args, **kwargs) [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise e [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] nwinfo = self.network_api.allocate_for_instance( [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] created_port_ids = self._update_ports_for_instance( [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 
1414, in _update_ports_for_instance [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] with excutils.save_and_reraise_exception(): [ 727.916220] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] self.force_reraise() [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise self.value [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] updated_port = self._update_port( [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] _ensure_no_port_binding_failure(port) [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] raise exception.PortBindingFailed(port_id=port['id']) [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] nova.exception.PortBindingFailed: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. [ 727.916560] env[61898]: ERROR nova.compute.manager [instance: 8ac73bda-db02-4427-9730-003561d078ca] [ 727.916850] env[61898]: DEBUG nova.compute.utils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 727.917934] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Build of instance 8ac73bda-db02-4427-9730-003561d078ca was re-scheduled: Binding failed for port 41d5d6de-432b-43ed-a989-213d3b4114de, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 727.919556] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 727.919556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquiring lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.919556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Acquired lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.919556] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.919958] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.919s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.922217] env[61898]: INFO nova.compute.claims [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 728.065507] env[61898]: DEBUG nova.network.neutron [-] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.178325] env[61898]: INFO nova.compute.manager [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 4a6adf12-7106-46ce-abb0-fe8c5c212905] Took 1.02 seconds to deallocate network for instance. [ 728.371666] env[61898]: DEBUG nova.network.neutron [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.398807] env[61898]: DEBUG nova.network.neutron [-] [instance: 2887126b-6db5-4578-a063-552e774542cc] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.455745] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.480345] env[61898]: DEBUG nova.network.neutron [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.537739] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.569485] env[61898]: INFO nova.compute.manager [-] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Took 1.03 seconds to deallocate network for instance. [ 728.902724] env[61898]: INFO nova.compute.manager [-] [instance: 2887126b-6db5-4578-a063-552e774542cc] Took 1.02 seconds to deallocate network for instance. [ 728.905105] env[61898]: DEBUG nova.compute.claims [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 728.905312] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.938591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.938865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.982479] env[61898]: DEBUG 
oslo_concurrency.lockutils [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] Releasing lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.982479] env[61898]: DEBUG nova.compute.manager [req-37338f59-e189-47da-93ed-5fefeaacd55a req-5d27e32d-cf62-4718-b413-fad4d75fa2fa service nova] [instance: 2887126b-6db5-4578-a063-552e774542cc] Received event network-vif-deleted-f990e522-d7e8-455a-b83a-fda1e1af7890 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 729.040916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Releasing lock "refresh_cache-8ac73bda-db02-4427-9730-003561d078ca" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.041161] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 729.041322] env[61898]: DEBUG nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 729.041484] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 729.060070] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.152107] env[61898]: INFO nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Took 0.58 seconds to detach 1 volumes for instance. 
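[editor's note] The recurring pattern in this stretch of the log is: Neutron reports a failed binding for a port, Nova's _ensure_no_port_binding_failure raises PortBindingFailed (the tracebacks at 726.215394 and 727.915511 above), and the compute manager then terminates the instance, deallocates its networks, aborts the resource claim and re-schedules the build. As a minimal, hedged sketch of the check that produces these errors: the helper name and exception message are taken from the tracebacks, while the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions about how Neutron flags the failure, not verbatim Nova source.

    # Hedged sketch (Python) of the port-binding check seen in the tracebacks above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    VIF_TYPE_BINDING_FAILED = "binding_failed"  # assumed value of the Neutron vif_type sentinel

    def _ensure_no_port_binding_failure(port):
        # Raise if Neutron reported that it could not bind the port to a host.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    # Example: a port dict shaped like a Neutron show/update-port result (hypothetical values).
    port = {
        "id": "f990e522-d7e8-455a-b83a-fda1e1af7890",
        "binding:vif_type": "binding_failed",
    }

    try:
        _ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # matches the "Binding failed for port ..." message in the log

Once this exception propagates out of _build_and_run_instance, the "Build of instance ... was re-scheduled: Binding failed for port ..." DEBUG lines and the compute_resources abort_instance_claim locking seen above and below are the expected cleanup path. [end editor's note]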
[ 729.155759] env[61898]: DEBUG nova.compute.claims [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Aborting claim: {{(pid=61898) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 729.155941] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.212523] env[61898]: INFO nova.scheduler.client.report [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Deleted allocations for instance 4a6adf12-7106-46ce-abb0-fe8c5c212905 [ 729.291527] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88118833-4be8-4e97-bb57-e2ed3a7edf4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.299201] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8226182-8a5f-4f82-bfa4-13ded0d44946 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.330633] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce14d64-84fc-450b-ba51-4bf1f21516dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.337912] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6ae327-e828-4ab0-a11b-e9ad8162f0a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.355419] env[61898]: DEBUG nova.compute.provider_tree [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.562783] env[61898]: DEBUG nova.network.neutron [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.723087] env[61898]: DEBUG oslo_concurrency.lockutils [None req-388433eb-df14-42bb-8b7e-470a63eb409d tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "4a6adf12-7106-46ce-abb0-fe8c5c212905" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 158.245s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.863927] env[61898]: DEBUG nova.scheduler.client.report [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 
tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 730.068847] env[61898]: INFO nova.compute.manager [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] [instance: 8ac73bda-db02-4427-9730-003561d078ca] Took 1.03 seconds to deallocate network for instance. [ 730.227195] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 730.368902] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.449s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.369432] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 730.371975] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.413s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.373336] env[61898]: INFO nova.compute.claims [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.748989] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.878542] env[61898]: DEBUG nova.compute.utils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 730.884339] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 730.884339] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.928906] env[61898]: DEBUG nova.policy [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5fb7782d2954e1d9afa64e4f570afb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a35107782b947b698807653288947cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 731.100712] env[61898]: INFO nova.scheduler.client.report [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Deleted allocations for instance 8ac73bda-db02-4427-9730-003561d078ca [ 731.191226] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Successfully created port: 6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 731.386692] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 731.611958] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7e158842-cfb3-48cb-9c33-f105c4728929 tempest-ListImageFiltersTestJSON-100942111 tempest-ListImageFiltersTestJSON-100942111-project-member] Lock "8ac73bda-db02-4427-9730-003561d078ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 159.810s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.712974] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bcff2b-8169-40b4-9397-e998dac1d0ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.720355] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306f2f4c-c083-4eee-8133-d1a91d6edc30 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.751684] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af0ee12-c19f-4dba-8de6-de0e798005bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.759249] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7c2ab6-be50-4faa-a9eb-44c0ed610abc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.773384] env[61898]: DEBUG nova.compute.provider_tree [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.118016] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 732.276854] env[61898]: DEBUG nova.scheduler.client.report [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 732.402227] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 732.426255] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 732.426522] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 732.426692] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.426908] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 732.427068] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.427215] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 732.427417] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 732.427574] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 732.427737] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 732.427895] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 732.428072] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.429314] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9842a60a-64c4-4cd2-bc64-e9edce7a7dd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.437545] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e340fa1-52f6-4609-a34b-dd32ef6e0f3a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.641256] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.783331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.784169] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 732.791939] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.984s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.876754] env[61898]: DEBUG nova.compute.manager [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Received event network-vif-plugged-6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 732.876841] env[61898]: DEBUG oslo_concurrency.lockutils [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] Acquiring lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.877044] env[61898]: DEBUG oslo_concurrency.lockutils [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.877570] env[61898]: DEBUG oslo_concurrency.lockutils [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.877570] env[61898]: DEBUG nova.compute.manager [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] No waiting events found dispatching network-vif-plugged-6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.877570] env[61898]: WARNING nova.compute.manager [req-b2edcaeb-8383-40dd-9f54-920df3ac46b8 req-98802950-f3db-44b1-974a-36091833f6ea service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Received unexpected event network-vif-plugged-6fc02f07-c316-49a4-8c33-f5a9db5f2387 for instance with vm_state building and task_state spawning. 
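The "Lock \"compute_resources\" acquired by ... waited 17.413s" and "released ... held 2.411s" records above come from oslo.concurrency's lockutils, which the resource tracker uses to serialize instance claims on a host. The following is a minimal sketch of that locking pattern, not Nova code: the lock name 'compute_resources' matches the log, but claim_resources() and its sleep are illustrative placeholders, and the acquired/released debug lines only appear when oslo debug logging is configured.

    import time

    from oslo_concurrency import lockutils

    # Serialize calls on the same in-process lock the way the resource
    # tracker serializes claims; lockutils emits the "acquired by ...
    # waited Ns" / "released by ... held Ns" debug records seen above.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Placeholder for the real claim bookkeeping; the sleep just makes
        # the "held" duration visible.
        time.sleep(0.5)
        return instance_uuid

    if __name__ == '__main__':
        print(claim_resources('29eadea9-fa85-4f51-97d0-a941e1658094'))
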
[ 733.075166] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Successfully updated port: 6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 733.296167] env[61898]: DEBUG nova.compute.utils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.300921] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 733.300921] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 733.368238] env[61898]: DEBUG nova.policy [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5fb7782d2954e1d9afa64e4f570afb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a35107782b947b698807653288947cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 733.577755] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.577906] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.578071] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.620638] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39466e1b-5cf6-40bc-ba83-c58f605ada72 {{(pid=61898) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.628691] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a388277b-d0ba-417e-9731-e8aa0304626b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.658778] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562fb2d5-ca7f-4365-887f-d73271c40e95 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.669059] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c5e240-a3f7-410e-beeb-0c7a8606e67e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.682907] env[61898]: DEBUG nova.compute.provider_tree [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.806640] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 733.897923] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Successfully created port: 3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.148622] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 734.188374] env[61898]: DEBUG nova.scheduler.client.report [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 734.415423] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Updating instance_info_cache with network_info: [{"id": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "address": "fa:16:3e:bb:a4:e3", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc02f07-c3", "ovs_interfaceid": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.698025] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.698025] env[61898]: ERROR nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. 
[ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Traceback (most recent call last): [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.driver.spawn(context, instance, image_meta, [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 734.698025] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] vm_ref = self.build_virtual_machine(instance, [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] for vif in network_info: [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return self._sync_wrapper(fn, *args, **kwargs) [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.wait() [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self[:] = self._gt.wait() [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return self._exit_event.wait() [ 734.698605] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] current.throw(*self._exc) [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] result = function(*args, **kwargs) [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] return func(*args, **kwargs) [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise e [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] nwinfo = self.network_api.allocate_for_instance( [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] created_port_ids = self._update_ports_for_instance( [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 734.699022] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] with excutils.save_and_reraise_exception(): [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] self.force_reraise() [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise self.value [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] updated_port = self._update_port( [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] _ensure_no_port_binding_failure(port) [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] raise exception.PortBindingFailed(port_id=port['id']) [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] nova.exception.PortBindingFailed: Binding failed for 
port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. [ 734.699464] env[61898]: ERROR nova.compute.manager [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] [ 734.699822] env[61898]: DEBUG nova.compute.utils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 734.699822] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.782s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.702727] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Build of instance 10e3f3dd-165b-4049-8c1f-f561c91717c0 was re-scheduled: Binding failed for port b2b0ebbb-4da8-43f9-b572-941ed83565aa, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 734.703358] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 734.703726] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquiring lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.704038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Acquired lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.704344] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.814325] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 734.837780] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 734.838062] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 734.838217] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.838394] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.838700] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.838700] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.838879] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.839040] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 734.839202] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.840030] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.840030] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.841096] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7fac9a-a260-49ba-ac4b-3472c7a10b01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.848754] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b50104-e275-4310-ae0d-e04fd3cc8635 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.918556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.918900] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Instance network_info: |[{"id": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "address": "fa:16:3e:bb:a4:e3", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc02f07-c3", "ovs_interfaceid": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2002}} [ 734.919426] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:a4:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fc02f07-c316-49a4-8c33-f5a9db5f2387', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.929370] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Creating folder: Project (7a35107782b947b698807653288947cd). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.930707] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf57d8eb-3b27-4206-9ce2-fc75e55ff017 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.934890] env[61898]: DEBUG nova.compute.manager [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Received event network-changed-6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 734.935122] env[61898]: DEBUG nova.compute.manager [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Refreshing instance network info cache due to event network-changed-6fc02f07-c316-49a4-8c33-f5a9db5f2387. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 734.935340] env[61898]: DEBUG oslo_concurrency.lockutils [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] Acquiring lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.935477] env[61898]: DEBUG oslo_concurrency.lockutils [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] Acquired lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.935815] env[61898]: DEBUG nova.network.neutron [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Refreshing network info cache for port 6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.947796] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Created folder: Project (7a35107782b947b698807653288947cd) in parent group-v267550. 
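The Folder.CreateFolder and CreateVM_Task records above are issued through oslo.vmware: the driver holds a VMwareAPISession, calls a managed-object method via invoke_api(), and polls the returned task, which is what produces the "Waiting for the task ... progress is 0%" lines. A rough sketch of that invoke-and-wait pattern follows; the vCenter host, credentials, and the commented-out CreateVM_Task arguments are placeholders, not values from this deployment.

    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder connection details; the real values come from the
    # [vmware] section of nova.conf in the environment that produced
    # this log.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Every SOAP call in the log ("Invoking PropertyCollector.
    # RetrievePropertiesEx", "Invoking Folder.CreateFolder", ...) goes
    # through invoke_api().
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    print(result)

    # Long-running operations such as CreateVM_Task return a task
    # reference; wait_for_task() polls it, yielding the "progress is 0%"
    # and "completed successfully" records above. folder_ref,
    # vm_config_spec and respool_ref are hypothetical stand-ins here.
    # task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
    #                           config=vm_config_spec, pool=respool_ref)
    # session.wait_for_task(task)
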
[ 734.948090] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Creating folder: Instances. Parent ref: group-v267579. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.948248] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35dd0ac4-d82d-4c22-87b0-3a6d40b7cc88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.958295] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Created folder: Instances in parent group-v267579. [ 734.958295] env[61898]: DEBUG oslo.service.loopingcall [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.958295] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.958295] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eabbfa8c-02cc-4dbd-9fbb-a15ef2ea7044 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.979949] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.979949] env[61898]: value = "task-1240452" [ 734.979949] env[61898]: _type = "Task" [ 734.979949] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.986975] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240452, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.229176] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 735.312716] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.489536] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240452, 'name': CreateVM_Task, 'duration_secs': 0.29487} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.490086] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.500498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 735.500498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.500853] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 735.501015] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc3a1ef3-382e-46fd-8de2-f887b575271a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.507880] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 735.507880] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d483ef-ce48-51f8-e02d-be81c149f5a8" [ 735.507880] env[61898]: _type = "Task" [ 735.507880] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.515836] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d483ef-ce48-51f8-e02d-be81c149f5a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.531893] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8e940d-f704-4518-be06-38abe4eb4da6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.540506] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1be1382-0046-472e-931e-9d1cd8656ecf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.572015] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2d3910-f42f-4622-89a7-2b3694f3210c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.578842] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfeb465-f75f-4580-9b35-8b82a2c573cb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.592755] env[61898]: DEBUG nova.compute.provider_tree [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.636051] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Successfully updated port: 3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.686851] env[61898]: DEBUG nova.network.neutron [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Updated VIF entry in instance network info cache for port 6fc02f07-c316-49a4-8c33-f5a9db5f2387. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.687210] env[61898]: DEBUG nova.network.neutron [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Updating instance_info_cache with network_info: [{"id": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "address": "fa:16:3e:bb:a4:e3", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc02f07-c3", "ovs_interfaceid": "6fc02f07-c316-49a4-8c33-f5a9db5f2387", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.816093] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Releasing lock "refresh_cache-10e3f3dd-165b-4049-8c1f-f561c91717c0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.816412] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 735.816602] env[61898]: DEBUG nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 735.816765] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 735.836633] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.018677] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d483ef-ce48-51f8-e02d-be81c149f5a8, 'name': SearchDatastore_Task, 'duration_secs': 0.008783} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.018909] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.019583] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 736.019583] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.019583] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.019821] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.019932] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e20115c2-51d1-4aaa-b8a0-c6e851647ab5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.030930] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.030930] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.030930] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-812199c3-6622-468f-837b-88e1bb708712 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.034364] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 736.034364] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a69262-685f-3b33-424f-56b57b66c110" [ 736.034364] env[61898]: _type = "Task" [ 736.034364] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.043512] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a69262-685f-3b33-424f-56b57b66c110, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.101388] env[61898]: DEBUG nova.scheduler.client.report [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 736.143039] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.143039] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.143039] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.190399] env[61898]: DEBUG oslo_concurrency.lockutils [req-aed56362-b995-4198-8853-fbcfd17464f3 req-f96637dd-c896-4d23-a274-166a3e9a9f1e service nova] Releasing lock "refresh_cache-9e6a3749-1974-4818-9cc6-76367d41b7e5" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.340265] env[61898]: DEBUG nova.network.neutron [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.546661] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a69262-685f-3b33-424f-56b57b66c110, 'name': SearchDatastore_Task, 'duration_secs': 0.007916} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.546661] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24ec722e-9bf2-4d91-bebf-66ecc24f6381 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.551483] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 736.551483] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52aa0a83-2c9d-c0be-11ba-a3bf7e68e4c0" [ 736.551483] env[61898]: _type = "Task" [ 736.551483] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.558838] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aa0a83-2c9d-c0be-11ba-a3bf7e68e4c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.603507] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.905s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.603507] env[61898]: ERROR nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. 
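The traceback that follows bottoms out in nova/network/neutron.py line 294, where a Neutron port whose binding failed is turned into the PortBindingFailed error summarized in the record above. Below is a minimal, self-contained sketch of that check, not the actual Nova source: the constant, exception class, and helper name are illustrative assumptions based on the standard Neutron "binding:vif_type" semantics.

VIF_TYPE_BINDING_FAILED = "binding_failed"  # Neutron's sentinel for a port no mechanism driver could bind

class PortBindingFailed(Exception):
    """Illustrative stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__("Binding failed for port %s, please check neutron "
                         "logs for more information." % port_id)
        self.port_id = port_id

def ensure_no_port_binding_failure(port):
    # The port dict comes back from the Neutron API; 'binding:vif_type'
    # reports the bound VIF type, or 'binding_failed' when binding failed.
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

# The situation recorded above for port 1cd401cb-5671-4baf-ac64-732214d54d44:
try:
    ensure_no_port_binding_failure(
        {"id": "1cd401cb-5671-4baf-ac64-732214d54d44",
         "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(exc)

Because the exception escapes _build_and_run_instance, the build is re-scheduled, which is what the "Build of instance 03ba4dad-5c58-4582-a36e-95de69b37474 was re-scheduled" record a few lines further down reports.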
[ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Traceback (most recent call last): [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.driver.spawn(context, instance, image_meta, [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.603507] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] vm_ref = self.build_virtual_machine(instance, [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] for vif in network_info: [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self._sync_wrapper(fn, *args, **kwargs) [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.wait() [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self[:] = self._gt.wait() [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self._exit_event.wait() [ 736.603881] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] result = hub.switch() [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return self.greenlet.switch() [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] result = function(*args, **kwargs) [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] return func(*args, **kwargs) [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise e [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] nwinfo = self.network_api.allocate_for_instance( [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 736.604840] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] created_port_ids = self._update_ports_for_instance( [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] with excutils.save_and_reraise_exception(): [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] self.force_reraise() [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise self.value [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] updated_port = self._update_port( [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] _ensure_no_port_binding_failure(port) [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 736.605272] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] raise exception.PortBindingFailed(port_id=port['id']) [ 736.605609] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] nova.exception.PortBindingFailed: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. [ 736.605609] env[61898]: ERROR nova.compute.manager [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] [ 736.605609] env[61898]: DEBUG nova.compute.utils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 736.605609] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.956s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.606923] env[61898]: INFO nova.compute.claims [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.609366] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Build of instance 03ba4dad-5c58-4582-a36e-95de69b37474 was re-scheduled: Binding failed for port 1cd401cb-5671-4baf-ac64-732214d54d44, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 736.609793] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 736.610017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquiring lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.610167] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Acquired lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.610319] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.682889] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 736.833865] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Updating instance_info_cache with network_info: [{"id": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "address": "fa:16:3e:7f:97:02", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1074c7-8a", "ovs_interfaceid": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.843241] env[61898]: INFO nova.compute.manager [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] [instance: 10e3f3dd-165b-4049-8c1f-f561c91717c0] Took 1.03 seconds to deallocate network for instance. 
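The instance_info_cache blobs logged above are JSON. The snippet below re-parses the entry for port 3f1074c7-8a0c-43aa-876a-aeccfb82877f, trimmed to the fields of interest, and pulls out the values that reappear later in the "Instance VIF info" record. The helper is illustrative only; the field names are taken verbatim from the log.

import json

# Cache entry from the log above, trimmed; the full structure also carries
# MTU, DHCP, routes, and binding details.
network_info = json.loads("""
[{"id": "3f1074c7-8a0c-43aa-876a-aeccfb82877f",
  "address": "fa:16:3e:7f:97:02",
  "devname": "tap3f1074c7-8a",
  "vnic_type": "normal",
  "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f",
              "bridge": "br-int",
              "subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.14", "type": "fixed"}]}]},
  "details": {"nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9",
              "segmentation_id": 509}}]
""")

def summarize_vifs(nw_info):
    # Extract the per-VIF fields the later "Instance VIF info" record is built from.
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        yield {"port_id": vif["id"],
               "mac": vif["address"],
               "ips": ips,
               "nsx_switch": vif["details"]["nsx-logical-switch-id"]}

for entry in summarize_vifs(network_info):
    print(entry)

The MAC, port id, and NSX logical-switch id extracted this way are exactly the values the VMware driver logs as "Instance VIF info" for instance 29eadea9-fa85-4f51-97d0-a941e1658094 just before invoking CreateVM_Task.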
[ 736.952020] env[61898]: DEBUG nova.compute.manager [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Received event network-vif-plugged-3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 736.952020] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Acquiring lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.952020] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.952471] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.952693] env[61898]: DEBUG nova.compute.manager [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] No waiting events found dispatching network-vif-plugged-3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 736.952905] env[61898]: WARNING nova.compute.manager [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Received unexpected event network-vif-plugged-3f1074c7-8a0c-43aa-876a-aeccfb82877f for instance with vm_state building and task_state spawning. [ 736.953117] env[61898]: DEBUG nova.compute.manager [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Received event network-changed-3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 736.953319] env[61898]: DEBUG nova.compute.manager [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Refreshing instance network info cache due to event network-changed-3f1074c7-8a0c-43aa-876a-aeccfb82877f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 736.953525] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Acquiring lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.061861] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aa0a83-2c9d-c0be-11ba-a3bf7e68e4c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.062154] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.062416] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 9e6a3749-1974-4818-9cc6-76367d41b7e5/9e6a3749-1974-4818-9cc6-76367d41b7e5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.062668] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ed54aeb-99a6-4f54-a363-816186b9b8a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.070347] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 737.070347] env[61898]: value = "task-1240453" [ 737.070347] env[61898]: _type = "Task" [ 737.070347] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.079435] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.134069] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.220718] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.339719] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.339887] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Instance network_info: |[{"id": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "address": "fa:16:3e:7f:97:02", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1074c7-8a", "ovs_interfaceid": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 737.340277] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Acquired lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.340389] env[61898]: DEBUG nova.network.neutron [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Refreshing network info cache for port 3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.341686] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:97:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f1074c7-8a0c-43aa-876a-aeccfb82877f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.350968] env[61898]: DEBUG oslo.service.loopingcall [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.357105] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.358585] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a150d8d-a6ca-4c0e-a335-ae11d3db6571 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.381295] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.381295] env[61898]: value = "task-1240454" [ 737.381295] env[61898]: _type = "Task" [ 737.381295] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.390514] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240454, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.442920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "d6c96dce-13ae-411a-b52a-fee484718a8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.443329] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.580063] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488139} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.580063] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 9e6a3749-1974-4818-9cc6-76367d41b7e5/9e6a3749-1974-4818-9cc6-76367d41b7e5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.580602] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.580602] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d25da8ba-4eb3-44b3-b685-4091e52e21f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.585891] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 737.585891] env[61898]: value = "task-1240455" [ 737.585891] env[61898]: _type = "Task" [ 737.585891] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.593754] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240455, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.602146] env[61898]: DEBUG nova.network.neutron [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Updated VIF entry in instance network info cache for port 3f1074c7-8a0c-43aa-876a-aeccfb82877f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 737.602525] env[61898]: DEBUG nova.network.neutron [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Updating instance_info_cache with network_info: [{"id": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "address": "fa:16:3e:7f:97:02", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f1074c7-8a", "ovs_interfaceid": "3f1074c7-8a0c-43aa-876a-aeccfb82877f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.727160] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Releasing lock "refresh_cache-03ba4dad-5c58-4582-a36e-95de69b37474" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.727483] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 737.727685] env[61898]: DEBUG nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 737.727856] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 737.744548] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.875588] env[61898]: INFO nova.scheduler.client.report [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Deleted allocations for instance 10e3f3dd-165b-4049-8c1f-f561c91717c0 [ 737.893649] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240454, 'name': CreateVM_Task, 'duration_secs': 0.331065} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.896192] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 737.897504] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.897680] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.898042] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 737.898770] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04ed3bbb-eaf0-4ec3-9b28-f8d5c5d1a507 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.903450] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 737.903450] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5245ff23-aadc-14cb-e4d5-582ba15d8b9d" [ 737.903450] env[61898]: _type = "Task" [ 737.903450] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.917150] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5245ff23-aadc-14cb-e4d5-582ba15d8b9d, 'name': SearchDatastore_Task, 'duration_secs': 0.009262} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.917498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.917736] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.917959] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.918117] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.918293] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.918887] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9140dd4-1be9-4dcd-9df6-ea443a4372df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.922327] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeff7de-b4a7-4ca4-b6bd-5983e025e9d6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.927024] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.927192] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.928157] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a62845-7635-4446-aa14-ea5939bff8f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.933215] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d5a269-428b-47b4-8b9f-54e1a1d58ce1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.937460] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 737.937460] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ec8211-c73c-093c-21d8-e3923d7aa8e5" [ 737.937460] env[61898]: _type = "Task" [ 737.937460] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.966247] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e0bc18-cf6c-463e-808e-4baaaf8976b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.971668] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ec8211-c73c-093c-21d8-e3923d7aa8e5, 'name': SearchDatastore_Task, 'duration_secs': 0.008261} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.972646] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a5a739-2496-4e4f-a2da-7624b89849cc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.978178] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee1d38-2193-4522-ae54-18392c8e503e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.984025] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 737.984025] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ac021c-c1eb-e24e-563f-fb36e4851b78" [ 737.984025] env[61898]: _type = "Task" [ 737.984025] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.993716] env[61898]: DEBUG nova.compute.provider_tree [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.000871] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ac021c-c1eb-e24e-563f-fb36e4851b78, 'name': SearchDatastore_Task, 'duration_secs': 0.008323} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.000871] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.001342] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 29eadea9-fa85-4f51-97d0-a941e1658094/29eadea9-fa85-4f51-97d0-a941e1658094.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 738.001430] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb2224a1-2061-4a58-a215-39b39391cdec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.009569] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 738.009569] env[61898]: value = "task-1240456" [ 738.009569] env[61898]: _type = "Task" [ 738.009569] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.017752] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240456, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.099029] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240455, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067697} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.099029] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.099029] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6009fb-f21c-4194-b129-514dec543596 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.114210] env[61898]: DEBUG oslo_concurrency.lockutils [req-74fc0aa4-f1f3-43cb-8b6c-ad694ae6c3fa req-29b20f34-9930-4557-87b8-b57857d78e11 service nova] Releasing lock "refresh_cache-29eadea9-fa85-4f51-97d0-a941e1658094" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.122747] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 9e6a3749-1974-4818-9cc6-76367d41b7e5/9e6a3749-1974-4818-9cc6-76367d41b7e5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.123011] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ffc117-32c4-48ba-a6bd-3ebc72de6326 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.141845] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 738.141845] env[61898]: value = "task-1240457" [ 738.141845] env[61898]: _type = "Task" [ 738.141845] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.149150] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240457, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.249079] env[61898]: DEBUG nova.network.neutron [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.389303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fd35f4f9-e5d1-4f38-afda-28b72620eb04 tempest-ServerMetadataTestJSON-1095586092 tempest-ServerMetadataTestJSON-1095586092-project-member] Lock "10e3f3dd-165b-4049-8c1f-f561c91717c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 166.284s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.498626] env[61898]: DEBUG nova.scheduler.client.report [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 738.521404] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240456, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.653398] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.751325] env[61898]: INFO nova.compute.manager [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] [instance: 03ba4dad-5c58-4582-a36e-95de69b37474] Took 1.02 seconds to deallocate network for instance. [ 738.894864] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 739.006715] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.006715] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 739.007857] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.627s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.020831] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240456, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63316} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.021151] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 29eadea9-fa85-4f51-97d0-a941e1658094/29eadea9-fa85-4f51-97d0-a941e1658094.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.021383] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.021635] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e40b4d36-9ee7-469d-838e-3849e387ec2c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.028582] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 739.028582] env[61898]: value = "task-1240458" [ 739.028582] env[61898]: _type = "Task" [ 739.028582] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.036217] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240458, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.107077] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.107358] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.152502] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240457, 'name': ReconfigVM_Task, 'duration_secs': 0.562018} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.152774] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 9e6a3749-1974-4818-9cc6-76367d41b7e5/9e6a3749-1974-4818-9cc6-76367d41b7e5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.153382] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61e1390d-bfa7-4945-9c15-d919760aa95d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.159459] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 739.159459] env[61898]: value = "task-1240459" [ 739.159459] env[61898]: _type = "Task" [ 739.159459] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.167195] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240459, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.415014] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.513363] env[61898]: DEBUG nova.compute.utils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 739.514692] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 739.514858] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 739.538075] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062858} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.540940] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.542074] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d91471-28d1-4bf0-8acf-3398d808b8aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.565281] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 29eadea9-fa85-4f51-97d0-a941e1658094/29eadea9-fa85-4f51-97d0-a941e1658094.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.567985] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99bc02ea-1372-4d61-9b0e-6f4013bd6c82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.584220] env[61898]: DEBUG nova.policy [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5fb7782d2954e1d9afa64e4f570afb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a35107782b947b698807653288947cd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 739.590333] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 739.590333] env[61898]: value = "task-1240460" [ 739.590333] env[61898]: _type = "Task" [ 739.590333] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.600745] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240460, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.614396] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 739.614556] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 739.670726] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240459, 'name': Rename_Task, 'duration_secs': 0.137061} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.671013] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 739.671242] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f49d3fd-6b6c-4432-a349-a7cc447dcf8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.678661] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 739.678661] env[61898]: value = "task-1240461" [ 739.678661] env[61898]: _type = "Task" [ 739.678661] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.688064] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240461, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.796739] env[61898]: INFO nova.scheduler.client.report [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Deleted allocations for instance 03ba4dad-5c58-4582-a36e-95de69b37474 [ 739.883156] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Successfully created port: 631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.897022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814b2c1e-7bf7-438e-876e-00b77ed54090 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.906078] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480dc307-881b-487a-a06b-950d1caf4b5a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.938494] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c3ed30-0fa5-4c11-bea6-ad4fcb2521a8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.947724] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe67ee5-d6bd-4a4a-846e-7da19252ab69 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.964621] env[61898]: DEBUG nova.compute.provider_tree [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.018061] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 740.104027] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240460, 'name': ReconfigVM_Task, 'duration_secs': 0.367338} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.104027] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 29eadea9-fa85-4f51-97d0-a941e1658094/29eadea9-fa85-4f51-97d0-a941e1658094.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.104027] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c7f0f8a2-ca55-4e94-86d0-3e56186e47d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.112207] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 740.112207] env[61898]: value = "task-1240462" [ 740.112207] env[61898]: _type = "Task" [ 740.112207] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.121324] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240462, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.188701] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240461, 'name': PowerOnVM_Task, 'duration_secs': 0.478444} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.188988] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 740.189209] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Took 7.79 seconds to spawn the instance on the hypervisor. 
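[editor's note] The CopyVirtualDisk_Task / ExtendVirtualDisk_Task / ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes an asynchronous vSphere task and then blocks in wait_for_task(), whose internal _poll_task produces the recurring "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming direct use of oslo.vmware; the vCenter host, credentials, and managed-object ID are placeholders, not values from this log:

```python
# Sketch only: invoke an async vSphere task and poll it to completion,
# the way the wait_for_task/_poll_task lines above are produced.
from oslo_vmware import api, vim_util

# Constructing the session connects to vCenter immediately
# (create_session=True by default). All values below are placeholders.
session = api.VMwareAPISession(
    'vc.example.test',            # vCenter host (placeholder)
    'admin', 'secret',            # credentials (placeholders)
    api_retry_count=10,
    task_poll_interval=0.5)       # interval behind the "progress is N%" polling

# Build a managed-object reference for the VM (placeholder moref value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the asynchronous task, then block until polling reports completion.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```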
[ 740.189375] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 740.190347] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21266699-29f4-41d4-b828-9efa291bedfa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.305348] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ef45258-4197-4163-b2da-9c3af81c7d6c tempest-AttachInterfacesV270Test-1448690786 tempest-AttachInterfacesV270Test-1448690786-project-member] Lock "03ba4dad-5c58-4582-a36e-95de69b37474" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 156.230s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.467935] env[61898]: DEBUG nova.scheduler.client.report [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 740.618389] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Didn't find any instances for network info cache update. 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 740.618823] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.618910] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.619117] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.619376] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.624184] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.624516] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240462, 'name': Rename_Task, 'duration_secs': 0.149448} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.624921] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.624967] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 740.625323] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.625550] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.626717] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-908c2d0c-db35-447a-8368-edbfb0a795ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.633981] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 740.633981] env[61898]: value = "task-1240463" [ 740.633981] env[61898]: _type = "Task" [ 740.633981] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.645218] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240463, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.709020] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Took 29.72 seconds to build instance. [ 740.808619] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 740.974065] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.966s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.974553] env[61898]: ERROR nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. 
[ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Traceback (most recent call last): [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.driver.spawn(context, instance, image_meta, [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] vm_ref = self.build_virtual_machine(instance, [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.974553] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] for vif in network_info: [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return self._sync_wrapper(fn, *args, **kwargs) [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.wait() [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self[:] = self._gt.wait() [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return self._exit_event.wait() [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] current.throw(*self._exc) [ 740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
740.974977] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] result = function(*args, **kwargs) [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] return func(*args, **kwargs) [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise e [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] nwinfo = self.network_api.allocate_for_instance( [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] created_port_ids = self._update_ports_for_instance( [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] with excutils.save_and_reraise_exception(): [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] self.force_reraise() [ 740.975445] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise self.value [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] updated_port = self._update_port( [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] _ensure_no_port_binding_failure(port) [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] raise exception.PortBindingFailed(port_id=port['id']) [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] nova.exception.PortBindingFailed: Binding failed for 
port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. [ 740.975859] env[61898]: ERROR nova.compute.manager [instance: 45138019-b69e-459b-99cf-47a47aa58e40] [ 740.975859] env[61898]: DEBUG nova.compute.utils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.976510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.071s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.979296] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Build of instance 45138019-b69e-459b-99cf-47a47aa58e40 was re-scheduled: Binding failed for port e8a2ebbb-f081-4c68-afcf-26bd33033f35, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 740.979718] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 740.979942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.980098] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.980254] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 741.031038] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 741.060117] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.060375] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.060871] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.060871] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.060871] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.061269] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.061269] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.064100] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 741.064100] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.064100] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.064100] env[61898]: DEBUG nova.virt.hardware [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.064100] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be5efdb-ba85-4caf-ac1c-6c10cbfec274 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.072239] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1c5c7a-e929-42d8-b1ad-e8fefcfb5b75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.130364] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.142782] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240463, 'name': PowerOnVM_Task, 'duration_secs': 0.47675} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.143140] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.143348] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Took 6.33 seconds to spawn the instance on the hypervisor. 
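[editor's note] The "compute_resources" lock traffic interleaved above (Acquiring lock ... by ..., Lock ... acquired ... :: waited Ns, Lock ... "released" ... :: held Ns) is emitted by oslo.concurrency's synchronized wrapper around the decorated function. A small illustrative sketch; the function name and lock usage are made up, only the lock name comes from the log:

```python
# Sketch only: the decorator form of oslo.concurrency locking that logs
# the Acquiring/acquired/released lines with waited/held timings.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs while holding the in-process "compute_resources" lock;
    # lockutils' inner wrapper logs how long we waited for the lock
    # and, on exit, how long it was held.
    print('claiming resources for', instance_uuid)

claim_resources('00000000-0000-0000-0000-000000000000')
```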
[ 741.143531] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 741.144341] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677a5a2c-755a-4336-b387-3dc5f972c026 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.210931] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.260s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.335045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.523155] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 741.665192] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Took 28.74 seconds to build instance. [ 741.716934] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 741.731627] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.746722] env[61898]: DEBUG nova.compute.manager [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Received event network-vif-plugged-631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 741.746936] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] Acquiring lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.748144] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] Lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.748400] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] Lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.748775] env[61898]: DEBUG nova.compute.manager [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] No waiting events found dispatching network-vif-plugged-631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 741.748842] env[61898]: WARNING nova.compute.manager [req-4d61f919-04fc-44b5-845b-f1c40df645d4 req-6ab87aa6-288c-4c56-a3e0-7b28160baa65 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Received unexpected event network-vif-plugged-631e2851-f905-45a8-8e8a-f51849e4bd16 for instance with vm_state building and task_state spawning. 
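[editor's note] The network-vif-plugged-631e2851-... records above show Nova receiving an external event from Neutron before the spawning greenthread has registered a waiter, hence "No waiting events found" and the "Received unexpected event" warning. For illustration, this is roughly the call Neutron's Nova notifier makes against the os-server-external-events API; the endpoint and token are placeholders, the UUIDs are the ones from this log, and this sketch is not part of the log itself:

```python
# Sketch only: post a network-vif-plugged external event to the Nova API.
# Requires service/admin credentials in a real deployment.
import requests

NOVA_URL = 'http://controller.example.test:8774/v2.1'   # placeholder endpoint
TOKEN = 'gAAAA...'                                       # placeholder token

resp = requests.post(
    f'{NOVA_URL}/os-server-external-events',
    headers={'X-Auth-Token': TOKEN, 'Content-Type': 'application/json'},
    json={'events': [{
        'name': 'network-vif-plugged',
        'server_uuid': 'a0580308-d25b-47cb-9c1c-adb763be7925',  # instance
        'tag': '631e2851-f905-45a8-8e8a-f51849e4bd16',          # Neutron port
        'status': 'completed',
    }]},
    timeout=10)
resp.raise_for_status()
```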
[ 741.828586] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d723f1-51d8-4625-a872-4d5adcfe77d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.838859] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df466cf-b584-46e0-9345-945c281f64f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.844578] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Successfully updated port: 631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 741.877374] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d35eb1-e80b-4dc3-85a0-ce190ffaf25d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.885994] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3578623-64c1-4228-b221-fb0f23b9be2d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.901407] env[61898]: DEBUG nova.compute.provider_tree [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.167548] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.177s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.236589] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-45138019-b69e-459b-99cf-47a47aa58e40" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.236836] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 742.237288] env[61898]: DEBUG nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 742.237759] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 742.244554] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.264021] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.348915] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.348915] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.348915] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 742.406786] env[61898]: DEBUG nova.scheduler.client.report [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 742.670928] env[61898]: DEBUG 
nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 742.766185] env[61898]: DEBUG nova.network.neutron [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.903433] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 742.913972] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.937s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.914205] env[61898]: ERROR nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. 
[ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] Traceback (most recent call last): [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.driver.spawn(context, instance, image_meta, [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] vm_ref = self.build_virtual_machine(instance, [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 742.914205] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] for vif in network_info: [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return self._sync_wrapper(fn, *args, **kwargs) [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.wait() [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self[:] = self._gt.wait() [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return self._exit_event.wait() [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] current.throw(*self._exc) [ 742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
742.914597] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] result = function(*args, **kwargs) [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] return func(*args, **kwargs) [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise e [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] nwinfo = self.network_api.allocate_for_instance( [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] created_port_ids = self._update_ports_for_instance( [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] with excutils.save_and_reraise_exception(): [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] self.force_reraise() [ 742.915057] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise self.value [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] updated_port = self._update_port( [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] _ensure_no_port_binding_failure(port) [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] raise exception.PortBindingFailed(port_id=port['id']) [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] nova.exception.PortBindingFailed: Binding failed for 
port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. [ 742.915524] env[61898]: ERROR nova.compute.manager [instance: 2887126b-6db5-4578-a063-552e774542cc] [ 742.915833] env[61898]: DEBUG nova.compute.utils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 742.916950] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.761s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.921100] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Build of instance 2887126b-6db5-4578-a063-552e774542cc was re-scheduled: Binding failed for port f990e522-d7e8-455a-b83a-fda1e1af7890, please check neutron logs for more information. {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 742.921487] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 742.924041] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquiring lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 742.924041] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Acquired lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.924041] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 743.097337] env[61898]: DEBUG nova.network.neutron [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Updating instance_info_cache with network_info: [{"id": 
"631e2851-f905-45a8-8e8a-f51849e4bd16", "address": "fa:16:3e:6d:8d:dd", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631e2851-f9", "ovs_interfaceid": "631e2851-f905-45a8-8e8a-f51849e4bd16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.194638] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 743.271309] env[61898]: INFO nova.compute.manager [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 45138019-b69e-459b-99cf-47a47aa58e40] Took 1.03 seconds to deallocate network for instance. [ 743.454708] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 743.556947] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.601080] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.601450] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Instance network_info: |[{"id": "631e2851-f905-45a8-8e8a-f51849e4bd16", "address": "fa:16:3e:6d:8d:dd", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631e2851-f9", "ovs_interfaceid": "631e2851-f905-45a8-8e8a-f51849e4bd16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 743.601891] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:8d:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '631e2851-f905-45a8-8e8a-f51849e4bd16', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.610589] env[61898]: DEBUG oslo.service.loopingcall [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.614989] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.614989] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46a79570-4636-49cb-9616-e5392ccba049 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.639611] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.639611] env[61898]: value = "task-1240464" [ 743.639611] env[61898]: _type = "Task" [ 743.639611] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.646412] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240464, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.854148] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3f21a6-8f9d-4904-aad9-ddacfd60a972 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.863950] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b4dbca-744e-482d-991a-8815efaaedb0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.906264] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6b9659-28db-440f-becb-05d5078dfd8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.914935] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fb71ad-e4c5-4b67-b58e-9337ca407e1a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.931727] env[61898]: DEBUG nova.compute.provider_tree [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.977260] env[61898]: DEBUG nova.compute.manager [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Received event network-changed-631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 743.977564] env[61898]: DEBUG nova.compute.manager [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Refreshing instance network info cache due to event network-changed-631e2851-f905-45a8-8e8a-f51849e4bd16. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 743.977770] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] Acquiring lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.977936] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] Acquired lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.978465] env[61898]: DEBUG nova.network.neutron [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Refreshing network info cache for port 631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.059782] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Releasing lock "refresh_cache-2887126b-6db5-4578-a063-552e774542cc" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.059782] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 744.059865] env[61898]: DEBUG nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 744.060116] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 744.083290] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.149643] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240464, 'name': CreateVM_Task, 'duration_secs': 0.40847} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.149643] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.151084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.151084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.151334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 744.151493] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9628dd0f-2673-456e-b397-4973c66d3bbe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.156389] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 744.156389] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5258d5e9-a00c-4182-0b85-317a91e76d82" [ 744.156389] env[61898]: _type = "Task" [ 744.156389] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.164677] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5258d5e9-a00c-4182-0b85-317a91e76d82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.303666] env[61898]: INFO nova.scheduler.client.report [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance 45138019-b69e-459b-99cf-47a47aa58e40 [ 744.436332] env[61898]: DEBUG nova.scheduler.client.report [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 744.586709] env[61898]: DEBUG nova.network.neutron [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.592740] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.592983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.667665] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5258d5e9-a00c-4182-0b85-317a91e76d82, 'name': SearchDatastore_Task, 'duration_secs': 0.02737} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.667975] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.668304] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 744.668578] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.668730] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.668909] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 744.669189] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42d6ea40-7657-4824-bd31-86f62db0e66c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.684031] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 744.684031] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 744.684031] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66e1badf-e15e-4039-ad11-a52efcbb28ed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.689369] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 744.689369] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5294ce1e-26b2-dbad-ef76-af936610dc8e" [ 744.689369] env[61898]: _type = "Task" [ 744.689369] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.698312] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5294ce1e-26b2-dbad-ef76-af936610dc8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.717694] env[61898]: DEBUG nova.network.neutron [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Updated VIF entry in instance network info cache for port 631e2851-f905-45a8-8e8a-f51849e4bd16. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 744.717694] env[61898]: DEBUG nova.network.neutron [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Updating instance_info_cache with network_info: [{"id": "631e2851-f905-45a8-8e8a-f51849e4bd16", "address": "fa:16:3e:6d:8d:dd", "network": {"id": "f7889fd1-d8a9-4fbd-afd4-838f9a74bb1f", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-647814482-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a35107782b947b698807653288947cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap631e2851-f9", "ovs_interfaceid": "631e2851-f905-45a8-8e8a-f51849e4bd16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.813643] env[61898]: DEBUG oslo_concurrency.lockutils [None req-40a850b1-f685-44e4-9049-ddd4dab328dc tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "45138019-b69e-459b-99cf-47a47aa58e40" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 152.888s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.942855] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.026s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.943520] env[61898]: ERROR nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Traceback (most recent call last): [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 2647, in _build_and_run_instance [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.driver.spawn(context, instance, image_meta, [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 542, in spawn [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] vm_ref = self.build_virtual_machine(instance, [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] vif_infos = vmwarevif.get_vif_info(self._session, [ 744.943520] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] for vif in network_info: [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return self._sync_wrapper(fn, *args, **kwargs) [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.wait() [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 
455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self[:] = self._gt.wait() [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return self._exit_event.wait() [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] current.throw(*self._exc) [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 744.944107] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] result = function(*args, **kwargs) [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] return func(*args, **kwargs) [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 2017, in _allocate_network_async [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise e [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/compute/manager.py", line 1995, in _allocate_network_async [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] nwinfo = self.network_api.allocate_for_instance( [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1278, in allocate_for_instance [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] created_port_ids = self._update_ports_for_instance( [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1414, in _update_ports_for_instance [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] with excutils.save_and_reraise_exception(): [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] self.force_reraise() [ 744.944663] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise self.value [ 744.945650] env[61898]: ERROR nova.compute.manager 
[instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 1389, in _update_ports_for_instance [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] updated_port = self._update_port( [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] _ensure_no_port_binding_failure(port) [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] raise exception.PortBindingFailed(port_id=port['id']) [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] nova.exception.PortBindingFailed: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. [ 744.945650] env[61898]: ERROR nova.compute.manager [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] [ 744.945650] env[61898]: DEBUG nova.compute.utils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 744.946084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.197s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.946939] env[61898]: INFO nova.compute.claims [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.949536] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Build of instance 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3 was re-scheduled: Binding failed for port 9b1609cf-96a3-4538-a166-e3e53aff506f, please check neutron logs for more information. 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2486}} [ 744.949937] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Unplugging VIFs for instance {{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3012}} [ 744.950169] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquiring lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.950315] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Acquired lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.950504] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.093461] env[61898]: INFO nova.compute.manager [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] [instance: 2887126b-6db5-4578-a063-552e774542cc] Took 1.03 seconds to deallocate network for instance. [ 745.201045] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5294ce1e-26b2-dbad-ef76-af936610dc8e, 'name': SearchDatastore_Task, 'duration_secs': 0.044253} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.202047] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d968635-dad3-49aa-9e62-7d215669bb17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.207409] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 745.207409] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5293b32d-f19d-630d-4e9a-b62e2f12dbe1" [ 745.207409] env[61898]: _type = "Task" [ 745.207409] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.215486] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5293b32d-f19d-630d-4e9a-b62e2f12dbe1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.219477] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a61ff56-1651-4170-928a-e077db1faa0c req-477e3d87-1d98-423e-b0f3-50470c3e6982 service nova] Releasing lock "refresh_cache-a0580308-d25b-47cb-9c1c-adb763be7925" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.316399] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 745.473528] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.563624] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.718252] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5293b32d-f19d-630d-4e9a-b62e2f12dbe1, 'name': SearchDatastore_Task, 'duration_secs': 0.009249} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.718530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.718786] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] a0580308-d25b-47cb-9c1c-adb763be7925/a0580308-d25b-47cb-9c1c-adb763be7925.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 745.719118] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-807eda29-b3be-4bb7-8234-a74111ebd4a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.725791] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 745.725791] env[61898]: value = "task-1240465" [ 745.725791] env[61898]: _type = "Task" [ 745.725791] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.733394] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.838136] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.066916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Releasing lock "refresh_cache-455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.066916] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61898) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3035}} [ 746.067361] env[61898]: DEBUG nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 746.067361] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 746.088177] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.124032] env[61898]: INFO nova.scheduler.client.report [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Deleted allocations for instance 2887126b-6db5-4578-a063-552e774542cc [ 746.238747] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509651} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.239155] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] a0580308-d25b-47cb-9c1c-adb763be7925/a0580308-d25b-47cb-9c1c-adb763be7925.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 746.239533] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 746.239883] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a4b7211-82af-40df-83e5-852b7668c2f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.248454] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 746.248454] env[61898]: value = "task-1240466" [ 746.248454] env[61898]: _type = "Task" [ 746.248454] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.257408] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.260582] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df066a8b-0c12-4407-84c4-9cb8670b3b04 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.266223] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e019621f-1924-47eb-855c-c3302fd6a90d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.308302] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75aa8664-d5fb-4603-a15c-241621e97412 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.315141] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f1df7d-ef7e-438b-9cf5-103513062a9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.329239] env[61898]: DEBUG nova.compute.provider_tree [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.591175] env[61898]: DEBUG nova.network.neutron [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.631758] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c3ea2891-3dc4-4c3a-bf4f-ebda1cbbd9f5 tempest-ServersNegativeTestMultiTenantJSON-222742685 tempest-ServersNegativeTestMultiTenantJSON-222742685-project-member] Lock "2887126b-6db5-4578-a063-552e774542cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.260s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.757535] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056686} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.757809] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 746.758670] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef87de8a-2713-4689-a498-621b65364535 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.780903] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] a0580308-d25b-47cb-9c1c-adb763be7925/a0580308-d25b-47cb-9c1c-adb763be7925.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.781599] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd4312e4-bdd3-4290-adf0-0de13c3a5f0e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.804861] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 746.804861] env[61898]: value = "task-1240467" [ 746.804861] env[61898]: _type = "Task" [ 746.804861] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.813159] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240467, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.832486] env[61898]: DEBUG nova.scheduler.client.report [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 746.980254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.980254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.094836] env[61898]: INFO nova.compute.manager [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] [instance: 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3] Took 1.03 seconds to deallocate network for instance. [ 747.135796] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 747.314381] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240467, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.337456] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.337960] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 747.340731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.700s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.342115] env[61898]: INFO nova.compute.claims [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.657381] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.814781] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240467, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.846558] env[61898]: DEBUG nova.compute.utils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.849764] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 747.852152] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.894797] env[61898]: DEBUG nova.policy [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e50ee88de9d4d67b7d4222dfe117256', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98ca09762c2e4b119437aa5b1a36e133', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 748.135356] env[61898]: INFO nova.scheduler.client.report [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Deleted allocations for instance 455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3 [ 748.205198] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Successfully created port: a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.321336] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240467, 'name': ReconfigVM_Task, 'duration_secs': 1.161567} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.321642] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Reconfigured VM instance instance-00000033 to attach disk [datastore2] a0580308-d25b-47cb-9c1c-adb763be7925/a0580308-d25b-47cb-9c1c-adb763be7925.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.324850] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a5b3c1a-3356-4a48-9e43-4e2cc830b69a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.331605] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 748.331605] env[61898]: value = "task-1240468" [ 748.331605] env[61898]: _type = "Task" [ 748.331605] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.340364] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240468, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.351085] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 748.640081] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c193fc76-52e7-4e04-97ba-32ad2f3929f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.643041] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bb56c0c2-7826-4b72-95fd-e94ce6e81552 tempest-ServersTestBootFromVolume-715013200 tempest-ServersTestBootFromVolume-715013200-project-member] Lock "455bddc2-0afd-4ebb-9fcd-ebbbeca38ae3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 151.275s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.649092] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b99bcb-004a-41cc-89dc-862193cbc27d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.681363] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f443b752-248a-4bed-81e0-7cbf45f6c103 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.689448] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9993c74d-9e2e-456b-8720-6fc28a4a086f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.704258] env[61898]: DEBUG nova.compute.provider_tree [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.846020] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240468, 'name': Rename_Task, 'duration_secs': 0.204435} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.846020] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.846020] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-828986aa-87d5-4688-8b63-2278d5a1b11a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.849832] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 748.849832] env[61898]: value = "task-1240469" [ 748.849832] env[61898]: _type = "Task" [ 748.849832] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.859019] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240469, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.146105] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 749.207898] env[61898]: DEBUG nova.scheduler.client.report [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 749.360622] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240469, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.365580] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 749.396993] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.397266] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.397437] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.397620] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.397762] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.397919] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.398163] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.398402] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.398593] env[61898]: DEBUG nova.virt.hardware [None 
req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.399072] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.399072] env[61898]: DEBUG nova.virt.hardware [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.399884] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239e314d-c07b-44bc-baff-0cfafc3d6f75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.408715] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f541e41-1f4b-4731-8204-876f485084de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.671887] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.713189] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.713657] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 749.716496] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.302s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.718206] env[61898]: INFO nova.compute.claims [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.861337] env[61898]: DEBUG oslo_vmware.api [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240469, 'name': PowerOnVM_Task, 'duration_secs': 0.512241} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.861645] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.861851] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Took 8.83 seconds to spawn the instance on the hypervisor. 
[ 749.862035] env[61898]: DEBUG nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 749.862808] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9716bda2-b3df-4ff6-8eee-f693e68ec186 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.934625] env[61898]: DEBUG nova.compute.manager [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Received event network-vif-plugged-a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 749.934844] env[61898]: DEBUG oslo_concurrency.lockutils [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.935176] env[61898]: DEBUG oslo_concurrency.lockutils [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.935248] env[61898]: DEBUG oslo_concurrency.lockutils [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.935407] env[61898]: DEBUG nova.compute.manager [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] No waiting events found dispatching network-vif-plugged-a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.935579] env[61898]: WARNING nova.compute.manager [req-1bfba208-e355-427e-980d-9eff85d4a286 req-a25089f6-a3da-475e-bb39-3e109b4e85d6 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Received unexpected event network-vif-plugged-a9c5b33c-5075-4ced-8700-0ca1e0071262 for instance with vm_state building and task_state spawning. [ 750.223246] env[61898]: DEBUG nova.compute.utils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 750.224853] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 750.227829] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.262346] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Successfully updated port: a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 750.389153] env[61898]: INFO nova.compute.manager [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Took 30.76 seconds to build instance. [ 750.515912] env[61898]: DEBUG nova.policy [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b766eccf77349cab3c468606a183d0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c406fb9a02748bfa2f24158ec5d6272', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 750.736744] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 750.766710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.767536] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.767536] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.890449] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f9e853a7-6c1c-4785-8b93-cf814902eaca tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.867s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.098572] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c3089a-6f5c-4f6b-9515-c2b2513b5f79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.108827] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6f9292-7f9f-4c9e-9547-2dd72516b428 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.112969] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Successfully created port: 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.151039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72ff2d8-cb43-4670-a472-0ddd057bcff1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.158779] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8238b419-be53-4b0b-b7de-46e74e21a764 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.173678] env[61898]: DEBUG nova.compute.provider_tree [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.324854] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 751.394613] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 751.581334] env[61898]: DEBUG nova.network.neutron [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.676539] env[61898]: DEBUG nova.scheduler.client.report [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 751.748069] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 751.783696] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 751.783696] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 751.783696] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.783920] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 751.783920] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.783920] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 751.783920] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 751.784080] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 751.784246] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 751.784406] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 751.784579] env[61898]: DEBUG nova.virt.hardware [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 751.785479] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7aaf90-781c-4d0d-b16e-495732a087b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.794134] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7a4b5d-78c6-4767-9483-220ff5a8317d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.916194] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.966062] env[61898]: DEBUG nova.compute.manager [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Received event network-changed-a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 751.966347] env[61898]: DEBUG nova.compute.manager [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Refreshing instance network info cache due to event network-changed-a9c5b33c-5075-4ced-8700-0ca1e0071262. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 751.966503] env[61898]: DEBUG oslo_concurrency.lockutils [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.082700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.083126] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Instance network_info: |[{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 752.083464] env[61898]: DEBUG oslo_concurrency.lockutils [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.083645] env[61898]: DEBUG nova.network.neutron [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Refreshing network info cache for port a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.085150] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:bb:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9c5b33c-5075-4ced-8700-0ca1e0071262', 
'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.092841] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Creating folder: Project (98ca09762c2e4b119437aa5b1a36e133). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 752.095848] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c780ae90-89df-43fd-b346-6e3536bfe090 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.107531] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Created folder: Project (98ca09762c2e4b119437aa5b1a36e133) in parent group-v267550. [ 752.107730] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Creating folder: Instances. Parent ref: group-v267584. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 752.107964] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc466de0-0031-4a91-b380-29ad785696eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.117836] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Created folder: Instances in parent group-v267584. [ 752.118014] env[61898]: DEBUG oslo.service.loopingcall [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.118203] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 752.118396] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2165a599-4a13-4d28-95bc-75d377911828 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.141084] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.141084] env[61898]: value = "task-1240473" [ 752.141084] env[61898]: _type = "Task" [ 752.141084] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.149382] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240473, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.182055] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.182811] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 752.185418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 11.055s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.185596] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.185820] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 752.186020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.851s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.187937] env[61898]: INFO nova.compute.claims [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.193371] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfe1e9d-7499-4fac-a8d7-4637b3995175 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.203272] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6185bb11-106e-4d31-9faa-87d3c34c7a19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.217746] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f37058-dac7-4863-afc9-c9915297496a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.225965] env[61898]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e37502-586d-4810-877e-e9248d01bf39 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.231823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.231823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.231823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.231823] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.232059] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.233360] env[61898]: INFO nova.compute.manager [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Terminating instance [ 752.261214] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181479MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 752.261287] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
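The repeated "Acquiring lock" / "acquired ... waited" / ""released" ... held" lines above all come from oslo.concurrency's lockutils wrapper around Nova's critical sections: the "compute_resources" lock guarding the resource tracker, and the per-instance and "-events" locks in the compute manager. Below is a minimal, illustrative sketch of the bookkeeping behind those three DEBUG lines; the traced_lock helper, the lock registry, and the plain threading.Lock are assumptions made for the example and are not the actual oslo.concurrency implementation, but the waited/held figures map onto the same measurements seen in the log: time spent blocking before acquisition and time the lock was held until release.

    # Illustrative only: mimics the "Acquiring"/"acquired"/"released" bookkeeping
    # that oslo_concurrency.lockutils emits around sections such as
    # ResourceTracker.instance_claim (lock "compute_resources").
    import contextlib
    import threading
    import time

    _locks = {}                       # lock name -> threading.Lock (sketch-only registry)
    _registry_guard = threading.Lock()


    @contextlib.contextmanager
    def traced_lock(name, caller):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        lock.acquire()                          # "waited" = time blocked here
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited   # "held" = time spent inside the block
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


    # Usage: an instance claim and the periodic resource audit contend for the same
    # name, so one caller's "waited" grows while the other caller holds the lock.
    with traced_lock("compute_resources",
                     "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
        time.sleep(0.1)   # stand-in for the claim work

This is why a waited value of several seconds (e.g. "waited 10.851s" above) indicates contention on "compute_resources" rather than slow work inside the claim itself.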
[ 752.262169] env[61898]: DEBUG nova.compute.manager [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 752.262418] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.263498] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ecafd1-80cb-4342-a134-7e7902848f1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.275652] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.275916] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38d4d982-6801-42f3-add4-f8ba2a9d9b76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.282536] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 752.282536] env[61898]: value = "task-1240474" [ 752.282536] env[61898]: _type = "Task" [ 752.282536] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.291476] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.445685] env[61898]: DEBUG nova.network.neutron [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updated VIF entry in instance network info cache for port a9c5b33c-5075-4ced-8700-0ca1e0071262. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.446090] env[61898]: DEBUG nova.network.neutron [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.650511] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240473, 'name': CreateVM_Task, 'duration_secs': 0.325964} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.650707] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.651383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.651664] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.652054] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 752.652319] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23a9718-1ed1-4a6a-9f1a-9f39f167335e {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.656619] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 752.656619] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ecd26b-7265-557b-f59e-33d34941c945" [ 752.656619] env[61898]: _type = "Task" [ 752.656619] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.664284] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ecd26b-7265-557b-f59e-33d34941c945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.691889] env[61898]: DEBUG nova.compute.utils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 752.693311] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 752.693472] env[61898]: DEBUG nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.734939] env[61898]: DEBUG nova.policy [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba42f2b4883140d4bb94dfac9119e400', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a95a9d6ac5c3414db6d8891ee1ada25a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 752.791566] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240474, 'name': PowerOffVM_Task, 'duration_secs': 0.195349} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.791823] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 752.791983] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 752.792238] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a95d2fbf-6572-4f91-9bf9-ac12de989c32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.851578] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 752.851776] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 752.851948] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleting the datastore file [datastore2] 9e6a3749-1974-4818-9cc6-76367d41b7e5 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.852207] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8939709-1a59-4d34-9557-5ed323c7c040 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.858500] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 752.858500] env[61898]: value = "task-1240476" [ 752.858500] env[61898]: _type = "Task" [ 752.858500] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.865546] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240476, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.950856] env[61898]: DEBUG oslo_concurrency.lockutils [req-35cc8162-de17-43c5-8639-98e029fab9d8 req-1ce2934c-33f1-4fea-9653-590f05fdd300 service nova] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.979663] env[61898]: DEBUG nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Successfully created port: 4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.167430] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ecd26b-7265-557b-f59e-33d34941c945, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.167727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.167956] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.168233] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.168333] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.168506] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.168757] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b025018-8aed-4361-a74a-eac571ecbb7e {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.176648] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.176648] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.178712] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e48ab72d-66fb-4284-bc08-21e77080cd81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.182305] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 753.182305] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5297ae54-07ed-102d-44fc-c8c844d8b1eb" [ 753.182305] env[61898]: _type = "Task" [ 753.182305] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.188653] env[61898]: DEBUG nova.compute.manager [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-vif-plugged-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 753.188844] env[61898]: DEBUG oslo_concurrency.lockutils [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] Acquiring lock "4c744673-0d9b-44ef-938f-372b101a2053-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.189050] env[61898]: DEBUG oslo_concurrency.lockutils [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] Lock "4c744673-0d9b-44ef-938f-372b101a2053-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.189213] env[61898]: DEBUG oslo_concurrency.lockutils [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] Lock "4c744673-0d9b-44ef-938f-372b101a2053-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.189372] env[61898]: DEBUG nova.compute.manager [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] No waiting events found dispatching network-vif-plugged-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 753.189527] env[61898]: WARNING 
nova.compute.manager [req-6a3ff7b6-6502-462c-a4e2-610cea66c75c req-35bf3491-3588-45da-a14a-f6b88910badc service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received unexpected event network-vif-plugged-0d2007ae-42aa-44eb-9414-3216e1c433d4 for instance with vm_state building and task_state spawning. [ 753.193084] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5297ae54-07ed-102d-44fc-c8c844d8b1eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.197020] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 753.335079] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Successfully updated port: 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.371311] env[61898]: DEBUG oslo_vmware.api [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169936} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.371594] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.371779] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.371952] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.372132] env[61898]: INFO nova.compute.manager [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Took 1.11 seconds to destroy the instance on the hypervisor. 
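Most of the vCenter operations in this log (CreateVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, SearchDatastore_Task, CopyVirtualDisk_Task, and so on) follow the same pattern: oslo.vmware invokes the API to start a task, then wait_for_task/_poll_task repeatedly reads the task state, logging "progress is N%." until the task reports "completed successfully" together with a duration_secs figure. The sketch below is a rough, assumption-laden rendering of that polling loop, not oslo.vmware's actual code: get_task_info is a hypothetical callable standing in for the PropertyCollector round trip the library really performs, and the state names mirror the vSphere TaskInfo states (queued, running, success, error).

    # Illustrative sketch of the wait_for_task/_poll_task pattern seen above.
    # `get_task_info` is a hypothetical stand-in for the real PropertyCollector
    # read that oslo.vmware issues against the vCenter Task managed object.
    import time


    class TaskFailed(Exception):
        """Raised when the backing vSphere task reports an error state."""


    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        """Poll a vSphere task until it finishes, mirroring the log output."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)   # dict with 'state', 'progress', 'error'
            if info["state"] in ("queued", "running"):
                print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if info["state"] == "success":
                print(f"Task: {task_ref} 'duration_secs': {duration:.6f} "
                      "completed successfully.")
                return info
            raise TaskFailed(f"Task {task_ref} failed after {duration:.2f}s: "
                             f"{info.get('error')}")

The "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }" blocks in the log are the stringified Task managed-object references handed to this loop, which is why every such block is followed by one or more "progress is N%." lines and, eventually, a "completed successfully" record carrying the measured duration.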
[ 753.372371] env[61898]: DEBUG oslo.service.loopingcall [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 753.372555] env[61898]: DEBUG nova.compute.manager [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 753.372656] env[61898]: DEBUG nova.network.neutron [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.480454] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d207f5a-59e6-41a3-bb75-7f287c6204db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.488273] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61f404d-ad7e-4f29-acc9-b568c1591a61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.518550] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b40fbdb-ae0c-45b8-b013-d21bc4b53f6f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.525933] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8b6609-c7bf-43df-863d-55d19d264e71 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.539206] env[61898]: DEBUG nova.compute.provider_tree [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.692444] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5297ae54-07ed-102d-44fc-c8c844d8b1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009129} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.693234] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ad02834-fa5e-4311-bf27-ab766c077f5a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.698882] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 753.698882] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a5994e-a7f2-b31c-a013-adae98e0174f" [ 753.698882] env[61898]: _type = "Task" [ 753.698882] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.709777] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a5994e-a7f2-b31c-a013-adae98e0174f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.843812] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.844017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.844219] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.046490] env[61898]: DEBUG nova.scheduler.client.report [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 754.219448] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 754.219448] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a5994e-a7f2-b31c-a013-adae98e0174f, 'name': SearchDatastore_Task, 'duration_secs': 0.033298} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.219448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.219448] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 1fb4535d-47d8-45c5-b6d6-d05e57237b98/1fb4535d-47d8-45c5-b6d6-d05e57237b98.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 754.222208] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26eda8b8-6def-49cd-b77f-2a7a380cebcc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.231542] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 754.231542] env[61898]: value = "task-1240477" [ 754.231542] env[61898]: _type = "Task" [ 754.231542] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.241094] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240477, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.250992] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.251955] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.251955] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.251955] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.251955] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.251955] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.252269] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.252383] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
754.252592] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.252793] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.253008] env[61898]: DEBUG nova.virt.hardware [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.253906] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4deb9cf3-fe6c-4e73-bcbe-976c0214174d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.067962] env[61898]: DEBUG nova.network.neutron [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.070036] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.884s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.070533] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 755.074052] env[61898]: DEBUG nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Successfully updated port: 4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.076651] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.832s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.078522] env[61898]: INFO nova.compute.claims [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.091623] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c19844-64a6-4e75-842b-6cbc75bbfb0b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.094278] env[61898]: DEBUG nova.compute.manager [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Received event network-vif-plugged-4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 755.094480] env[61898]: DEBUG oslo_concurrency.lockutils [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] Acquiring lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.094677] env[61898]: DEBUG oslo_concurrency.lockutils [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] Lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.094837] env[61898]: DEBUG oslo_concurrency.lockutils [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] Lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.094993] env[61898]: DEBUG nova.compute.manager [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] No waiting events found dispatching network-vif-plugged-4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.095163] env[61898]: WARNING nova.compute.manager [req-3ef1555f-1cd8-4e90-955b-1ef8919c3de2 
req-e72cd52c-4fb6-428e-ac52-b8b45dcf8583 service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Received unexpected event network-vif-plugged-4a4f6aae-310b-4c24-8d85-e7b3058aa170 for instance with vm_state building and task_state spawning. [ 755.104089] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240477, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466906} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.115377] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 1fb4535d-47d8-45c5-b6d6-d05e57237b98/1fb4535d-47d8-45c5-b6d6-d05e57237b98.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.115633] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.117592] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d39d403e-ad1b-4d4f-8c10-059d10eee9b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.125903] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 755.125903] env[61898]: value = "task-1240478" [ 755.125903] env[61898]: _type = "Task" [ 755.125903] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.135015] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240478, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.177144] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.368735] env[61898]: DEBUG nova.compute.manager [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 755.368735] env[61898]: DEBUG nova.compute.manager [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing instance network info cache due to event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 755.368735] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.453564] env[61898]: DEBUG nova.network.neutron [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.577397] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.577397] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.577397] env[61898]: DEBUG 
nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 755.578590] env[61898]: INFO nova.compute.manager [-] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Took 2.21 seconds to deallocate network for instance. [ 755.597228] env[61898]: DEBUG nova.compute.utils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.600856] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 755.601898] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.637450] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.21734} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.637632] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.639318] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e0c3b2-1146-4f3f-8e7a-8c41db44078c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.672937] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 1fb4535d-47d8-45c5-b6d6-d05e57237b98/1fb4535d-47d8-45c5-b6d6-d05e57237b98.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.672937] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db64b6aa-cccd-4345-ad05-fa6dd7ed4112 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.690921] env[61898]: DEBUG nova.policy [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a2508b1f3f945459495cef52abefedb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '819c8a7ff0aa4d7186bd859e4b56d16e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 755.698029] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 755.698029] env[61898]: value = "task-1240479" [ 755.698029] env[61898]: _type = "Task" [ 755.698029] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.705942] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240479, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.958859] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.959906] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Instance network_info: |[{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 755.959906] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.960123] env[61898]: DEBUG nova.network.neutron [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 755.962303] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:6e:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d2007ae-42aa-44eb-9414-3216e1c433d4', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.970148] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f 
tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Creating folder: Project (4c406fb9a02748bfa2f24158ec5d6272). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.970706] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ef31ef0-9fda-4a6f-8f7b-e6ff851f7274 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.981829] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Created folder: Project (4c406fb9a02748bfa2f24158ec5d6272) in parent group-v267550. [ 755.981829] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Creating folder: Instances. Parent ref: group-v267587. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.982152] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19576101-a063-455c-8ee7-8a9090558730 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.990756] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Created folder: Instances in parent group-v267587. [ 755.990965] env[61898]: DEBUG oslo.service.loopingcall [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.991167] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.991487] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-96a2e73e-704f-454e-a8d3-119aaf35694a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.017133] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.017133] env[61898]: value = "task-1240482" [ 756.017133] env[61898]: _type = "Task" [ 756.017133] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.025148] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240482, 'name': CreateVM_Task} progress is 0%. 
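Note: the folder records above show the layout the driver ensures before issuing CreateVM_Task: a per-project folder under the OpenStack root (group-v267550 here), with an "Instances" folder inside it. A hedged sketch of that two-level step, where create_folder() is assumed to wrap the vSphere Folder.CreateFolder call and to return the existing reference if the folder is already there:

    def ensure_instance_folder(session, root_folder_ref, project_id):
        # Sketch only: create_folder() is a hypothetical wrapper around
        # Folder.CreateFolder that tolerates an already-existing folder.
        project_folder = create_folder(session, root_folder_ref,
                                       "Project (%s)" % project_id)
        return create_folder(session, project_folder, "Instances")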
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.041093] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Successfully created port: ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.092070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.108141] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 756.128737] env[61898]: DEBUG nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.209333] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240479, 'name': ReconfigVM_Task, 'duration_secs': 0.277525} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.211033] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 1fb4535d-47d8-45c5-b6d6-d05e57237b98/1fb4535d-47d8-45c5-b6d6-d05e57237b98.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.211033] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9395dff8-0445-4148-8963-9a954296b609 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.217398] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 756.217398] env[61898]: value = "task-1240483" [ 756.217398] env[61898]: _type = "Task" [ 756.217398] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.230361] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240483, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.373484] env[61898]: DEBUG nova.network.neutron [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Updating instance_info_cache with network_info: [{"id": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "address": "fa:16:3e:c8:0a:4f", "network": {"id": "99175ca1-0f41-43df-bcf9-5f78e866129a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1860546503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a95a9d6ac5c3414db6d8891ee1ada25a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4f6aae-31", "ovs_interfaceid": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.460938] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f103f4cc-1348-4904-90bc-1ed6cfbe2310 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.468592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4119a72b-ceaa-42a4-8287-6024b75cc2b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.511388] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06cafe4-286a-4d07-bdbb-2cadc8f0f854 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.522061] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3915fa03-ad09-4335-a010-ecc295afc876 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.529037] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240482, 'name': CreateVM_Task, 'duration_secs': 0.339881} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.529573] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.530285] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.530455] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.530760] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 756.538295] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfbf6123-b86e-4f3e-ba08-bf99c2b70883 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.540514] env[61898]: DEBUG nova.compute.provider_tree [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.545027] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 756.545027] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c2cb-f109-de11-b423-4cf98e6fc98e" [ 756.545027] env[61898]: _type = "Task" [ 756.545027] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.553677] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c2cb-f109-de11-b423-4cf98e6fc98e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.699752] env[61898]: DEBUG nova.network.neutron [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updated VIF entry in instance network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4. 
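Note: the "Acquiring lock / Acquired lock" pairs on the devstack-image-cache_base path above are oslo.concurrency named locks; concurrent spawns of the same image serialize on the lock name, which mirrors the datastore path of the cached image. A minimal sketch of that pattern, assuming a caller-supplied work() callable:

    from oslo_concurrency import lockutils

    def with_image_cache_lock(image_id, work):
        # Serialize all work on one cached image behind a named lock,
        # matching the lock names visible in the records above.
        lock_name = "[datastore1] devstack-image-cache_base/%s" % image_id
        with lockutils.lock(lock_name):
            return work()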
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 756.700230] env[61898]: DEBUG nova.network.neutron [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.727123] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240483, 'name': Rename_Task, 'duration_secs': 0.160075} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.727617] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.727617] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27b81c0e-c3f4-48e5-8f73-305a8b0f3f55 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.735223] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 756.735223] env[61898]: value = "task-1240484" [ 756.735223] env[61898]: _type = "Task" [ 756.735223] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.743710] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240484, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.877966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Releasing lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.878058] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Instance network_info: |[{"id": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "address": "fa:16:3e:c8:0a:4f", "network": {"id": "99175ca1-0f41-43df-bcf9-5f78e866129a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1860546503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a95a9d6ac5c3414db6d8891ee1ada25a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4f6aae-31", "ovs_interfaceid": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 756.878468] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:0a:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '412cde91-d0f0-4193-b36b-d8b9d17384c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a4f6aae-310b-4c24-8d85-e7b3058aa170', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 756.886680] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Creating folder: Project (a95a9d6ac5c3414db6d8891ee1ada25a). Parent ref: group-v267550. 
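Note: the "Instance VIF info" records show how each Neutron port in network_info is condensed into the dict handed to the VM builder: the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork reference, the port id becomes iface_id, and the NIC model is vmxnet3. A rough mapping sketch using only the field names visible in the log (not Nova's exact code):

    def vif_info_from_port(vif):
        # vif is one entry of the network_info list dumped above.
        details = vif["details"]
        return {
            "network_name": vif["network"]["bridge"],          # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": "vmxnet3",
        }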
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.887016] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1392c502-cbbc-4369-b19d-8f37e7f02617 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.898017] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Created folder: Project (a95a9d6ac5c3414db6d8891ee1ada25a) in parent group-v267550. [ 756.898118] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Creating folder: Instances. Parent ref: group-v267590. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 756.898310] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61a7d607-826f-458d-be5c-24153f5ace85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.907365] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Created folder: Instances in parent group-v267590. [ 756.907665] env[61898]: DEBUG oslo.service.loopingcall [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 756.908231] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 756.908231] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f995dcac-d8a1-459a-9118-bf62bd7238bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.927063] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 756.927063] env[61898]: value = "task-1240487" [ 756.927063] env[61898]: _type = "Task" [ 756.927063] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.934747] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240487, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.958745] env[61898]: DEBUG nova.compute.manager [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Received event network-changed-4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 756.958979] env[61898]: DEBUG nova.compute.manager [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Refreshing instance network info cache due to event network-changed-4a4f6aae-310b-4c24-8d85-e7b3058aa170. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 756.960030] env[61898]: DEBUG oslo_concurrency.lockutils [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] Acquiring lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.960030] env[61898]: DEBUG oslo_concurrency.lockutils [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] Acquired lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.960030] env[61898]: DEBUG nova.network.neutron [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Refreshing network info cache for port 4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 757.043955] env[61898]: DEBUG nova.scheduler.client.report [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 757.060481] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c2cb-f109-de11-b423-4cf98e6fc98e, 'name': SearchDatastore_Task, 'duration_secs': 0.027849} completed successfully. 
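Note: the inventory record above is what the resource tracker reports to Placement; schedulable capacity for each resource class is (total - reserved) * allocation_ratio. A small worked example with the exact figures from the log:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0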
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.060877] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.061104] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.061335] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.061622] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.061714] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.061989] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-226007c8-ea8f-4b2d-bf62-11fb70fa516f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.071715] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.071930] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Folder [datastore1] devstack-image-cache_base created. 
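Note: before copying anything, the spawn path makes sure the image-cache directory exists (FileManager.MakeDirectory) and then searches the datastore for the cached vmdk (SearchDatastore_Task); only a miss would trigger a download from Glance. A condensed sketch of that check, with mkdir(), search_datastore() and fetch_image() as hypothetical wrappers for the calls in the records above:

    def ensure_cached_image(session, datastore, image_id):
        # Condensed, hypothetical sketch of the cache check shown above.
        cache_dir = "[%s] devstack-image-cache_base" % datastore
        cached_vmdk = "%s/%s/%s.vmdk" % (cache_dir, image_id, image_id)
        mkdir(session, cache_dir)                        # idempotent MakeDirectory
        if not search_datastore(session, cached_vmdk):   # SearchDatastore_Task
            fetch_image(session, image_id, cached_vmdk)  # download from Glance
        return cached_vmdk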
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.072855] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e39cbc40-b540-411b-943a-5b355cda2b35 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.079120] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 757.079120] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52484cef-6812-c65a-9289-894d69e27e36" [ 757.079120] env[61898]: _type = "Task" [ 757.079120] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.088650] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52484cef-6812-c65a-9289-894d69e27e36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.117338] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 757.142317] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.144021] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.144021] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.144021] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 
tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.144021] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.144021] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.144263] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.144263] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.144263] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.144263] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.144263] env[61898]: DEBUG nova.virt.hardware [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.145853] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a5c1bc-ee1e-4c2b-92f2-1fdc66e73944 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.154377] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a7187f-a598-4b73-87a0-9e64fa8272c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.203460] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.203819] env[61898]: DEBUG nova.compute.manager 
[req-3b44fbf9-1dd9-4d47-b134-3658aacfba5d req-a2da8be6-7977-4754-b938-75b267c74859 service nova] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Received event network-vif-deleted-6fc02f07-c316-49a4-8c33-f5a9db5f2387 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 757.245056] env[61898]: DEBUG oslo_vmware.api [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240484, 'name': PowerOnVM_Task, 'duration_secs': 0.492281} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.245253] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.245453] env[61898]: INFO nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Took 7.88 seconds to spawn the instance on the hypervisor. [ 757.245629] env[61898]: DEBUG nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 757.246720] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8bc9e1-c62c-4c4b-b267-b572891ac3e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.436926] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240487, 'name': CreateVM_Task, 'duration_secs': 0.386986} completed successfully. 
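Note: the "Build topologies for 1 vcpu(s)" / "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" records a few lines up come from nova.virt.hardware: it enumerates (sockets, cores, threads) triples whose product equals the vCPU count and which fit under the flavor/image maxima (65536 each here, i.e. effectively unbounded). A simplified sketch of that enumeration:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Simplified relative to nova.virt.hardware: yield every topology whose
        # product matches the vCPU count within the per-dimension maxima.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], as in the log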
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.437172] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 757.437873] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.438047] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.438466] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 757.438740] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7b29267-bc9e-4f78-8c48-0c94363b1382 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.443169] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 757.443169] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52372bd6-385a-5faa-c4d9-9921e670dc39" [ 757.443169] env[61898]: _type = "Task" [ 757.443169] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.450881] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52372bd6-385a-5faa-c4d9-9921e670dc39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.554068] env[61898]: DEBUG nova.compute.manager [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Received event network-vif-plugged-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 757.554308] env[61898]: DEBUG oslo_concurrency.lockutils [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] Acquiring lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.554561] env[61898]: DEBUG oslo_concurrency.lockutils [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.554741] env[61898]: DEBUG oslo_concurrency.lockutils [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.554910] env[61898]: DEBUG nova.compute.manager [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] No waiting events found dispatching network-vif-plugged-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 757.555256] env[61898]: WARNING nova.compute.manager [req-41101018-9799-4e41-8e5d-0b3393438dba req-956e3db6-39f8-4070-964b-9794089b77d3 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Received unexpected event network-vif-plugged-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 for instance with vm_state building and task_state spawning. [ 757.555964] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.556475] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Start building networks asynchronously for instance. 
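Note: the WARNING above is Nova's external-event plumbing at work: Neutron reports network-vif-plugged for the port, but no code path has registered a waiter for that event yet, so it is dispatched to nobody and logged as unexpected while the instance is still building. A very reduced sketch of the prepare/pop waiter pattern (not Nova's actual classes):

    import threading

    class InstanceEvents:
        def __init__(self):
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            # Called before triggering the operation; the caller later blocks
            # on waiter.wait(timeout).
            waiter = threading.Event()
            self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop_event(self, instance_uuid, event_name):
            # Called when the external event arrives from Neutron.
            waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
            else:
                waiter.set()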
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 757.559582] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.365s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.561430] env[61898]: INFO nova.compute.claims [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.568029] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.568245] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.591602] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52484cef-6812-c65a-9289-894d69e27e36, 'name': SearchDatastore_Task, 'duration_secs': 0.009452} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.592881] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82716598-2616-4181-9600-aebc4a8721ca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.599582] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 757.599582] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e1ee3f-1223-fdb2-b989-461511909022" [ 757.599582] env[61898]: _type = "Task" [ 757.599582] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.612057] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e1ee3f-1223-fdb2-b989-461511909022, 'name': SearchDatastore_Task, 'duration_secs': 0.009582} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.612317] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.612565] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 4c744673-0d9b-44ef-938f-372b101a2053/4c744673-0d9b-44ef-938f-372b101a2053.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.612817] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9593975-48da-44f9-a040-f536e6d38a1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.620135] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 757.620135] env[61898]: value = "task-1240488" [ 757.620135] env[61898]: _type = "Task" [ 757.620135] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.627900] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240488, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.674620] env[61898]: DEBUG nova.network.neutron [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Updated VIF entry in instance network info cache for port 4a4f6aae-310b-4c24-8d85-e7b3058aa170. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 757.674945] env[61898]: DEBUG nova.network.neutron [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Updating instance_info_cache with network_info: [{"id": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "address": "fa:16:3e:c8:0a:4f", "network": {"id": "99175ca1-0f41-43df-bcf9-5f78e866129a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1860546503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a95a9d6ac5c3414db6d8891ee1ada25a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "412cde91-d0f0-4193-b36b-d8b9d17384c6", "external-id": "nsx-vlan-transportzone-461", "segmentation_id": 461, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4f6aae-31", "ovs_interfaceid": "4a4f6aae-310b-4c24-8d85-e7b3058aa170", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.680480] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Successfully updated port: ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.768407] env[61898]: INFO nova.compute.manager [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Took 27.04 seconds to build instance. [ 757.955849] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52372bd6-385a-5faa-c4d9-9921e670dc39, 'name': SearchDatastore_Task, 'duration_secs': 0.009812} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.956206] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.956447] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.956689] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.956831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.957015] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.957295] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-653f3b0c-7f37-4e2c-a347-0c93a5e0a077 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.971107] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.971107] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.972095] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5b7e9f7-a2f7-4f9e-b28a-709fe044e14f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.978626] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 757.978626] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d36e85-2e5a-f47c-09e3-39257e874cd1" [ 757.978626] env[61898]: _type = "Task" [ 757.978626] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.988216] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d36e85-2e5a-f47c-09e3-39257e874cd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.061545] env[61898]: DEBUG nova.compute.utils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.063492] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 758.063943] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.114029] env[61898]: DEBUG nova.policy [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a2508b1f3f945459495cef52abefedb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '819c8a7ff0aa4d7186bd859e4b56d16e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 758.129894] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438801} completed successfully. 
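Note: the nova.policy record above is a routine oslo.policy denial: network:attach_external_network is an admin-only rule, and the tempest credentials only carry the member/reader roles, so the check fails and the port is created without external-network privileges. A hedged sketch of reproducing that check with oslo.policy; the 'is_admin:True' rule string is assumed here as the admin-only default, and the credentials mirror the dict dumped in the log:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "is_admin:True"))

    creds = {"roles": ["reader", "member"], "is_admin": False,
             "project_id": "819c8a7ff0aa4d7186bd859e4b56d16e"}
    # Returns False for these non-admin credentials, matching the log record.
    print(enforcer.enforce("network:attach_external_network", {}, creds))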
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.130200] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 4c744673-0d9b-44ef-938f-372b101a2053/4c744673-0d9b-44ef-938f-372b101a2053.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.130389] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.130656] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e4238c5-cff3-443d-b1df-3fa610b90886 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.138551] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 758.138551] env[61898]: value = "task-1240489" [ 758.138551] env[61898]: _type = "Task" [ 758.138551] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.147026] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240489, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.178763] env[61898]: DEBUG oslo_concurrency.lockutils [req-29e03f86-0be2-4800-a05c-887f66e87be4 req-f0bb86ae-0eb0-4d73-a703-cade4e7f666a service nova] Releasing lock "refresh_cache-34338563-05d4-477b-8480-6ef4cbf28e72" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.182977] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.183587] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.183627] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.270909] env[61898]: DEBUG oslo_concurrency.lockutils [None req-94d8bae7-da11-4b17-8ed8-1dc7fa4cd0f9 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 141.488s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.490441] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d36e85-2e5a-f47c-09e3-39257e874cd1, 'name': SearchDatastore_Task, 'duration_secs': 0.053837} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.491572] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28fa1cb1-a853-4e0b-920b-ee8f8ad95d81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.498940] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 758.498940] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525ee682-950b-1385-eb05-90c9874fffc0" [ 758.498940] env[61898]: _type = "Task" [ 758.498940] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.506318] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ee682-950b-1385-eb05-90c9874fffc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.566962] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 758.648810] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240489, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065931} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.649217] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.649835] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc28db46-eea8-4158-85f6-4447a1d9ac3a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.671831] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 4c744673-0d9b-44ef-938f-372b101a2053/4c744673-0d9b-44ef-938f-372b101a2053.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.672786] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Successfully created port: c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.676662] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea8d88a0-27d8-446d-bee3-5fe477b703f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.704242] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 758.704242] env[61898]: value = "task-1240490" [ 758.704242] env[61898]: _type = "Task" [ 758.704242] env[61898]: } 
to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.717225] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240490, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.727326] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.775162] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 758.876450] env[61898]: DEBUG nova.network.neutron [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updating instance_info_cache with network_info: [{"id": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "address": "fa:16:3e:d2:5d:13", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb06f4c-13", "ovs_interfaceid": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.926577] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7755842a-9d1d-4f0b-a9ad-1803f5842906 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.934535] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0eda048-5bac-4c85-8118-c8192da7ce99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.965941] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c3682f-aa10-4e42-97c1-dae3cb34c4ef {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.973541] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f96217-b47e-4d77-bf09-ea170cac8886 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.987738] env[61898]: DEBUG nova.compute.provider_tree [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.006667] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ee682-950b-1385-eb05-90c9874fffc0, 'name': SearchDatastore_Task, 'duration_secs': 0.009336} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.007025] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.007157] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 34338563-05d4-477b-8480-6ef4cbf28e72/34338563-05d4-477b-8480-6ef4cbf28e72.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.007379] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d6cf54f-f031-4d0c-be16-bd401cd636fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.013281] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 759.013281] env[61898]: value = "task-1240491" [ 759.013281] env[61898]: _type = "Task" [ 759.013281] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.021183] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240491, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.215908] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240490, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.297710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.381612] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.381788] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance network_info: |[{"id": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "address": "fa:16:3e:d2:5d:13", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb06f4c-13", "ovs_interfaceid": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 759.382153] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:5d:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddb06f4c-13ed-4322-b1e8-f4022b32e4f4', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.389876] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating folder: Project (819c8a7ff0aa4d7186bd859e4b56d16e). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.390232] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b375587f-286d-4686-8da1-6ed33f1bc29f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.403054] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created folder: Project (819c8a7ff0aa4d7186bd859e4b56d16e) in parent group-v267550. [ 759.403276] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating folder: Instances. Parent ref: group-v267593. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.403535] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f36ca069-1f1b-4888-ac3d-c8e3bba562e0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.415553] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created folder: Instances in parent group-v267593. [ 759.415716] env[61898]: DEBUG oslo.service.loopingcall [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.415923] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 759.416153] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-186e16e3-7420-493d-ad55-9b15b29313f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.439517] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.439517] env[61898]: value = "task-1240494" [ 759.439517] env[61898]: _type = "Task" [ 759.439517] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.450652] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240494, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.490875] env[61898]: DEBUG nova.scheduler.client.report [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 759.527341] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240491, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440108} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.527611] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 34338563-05d4-477b-8480-6ef4cbf28e72/34338563-05d4-477b-8480-6ef4cbf28e72.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 759.528077] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 759.528197] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e84a5bf-8dd8-4a5b-a3a3-595d3bc116e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.534536] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 759.534536] env[61898]: value = "task-1240495" [ 759.534536] env[61898]: _type = "Task" [ 759.534536] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.543276] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.581498] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 759.607894] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 759.608211] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 759.608409] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.608650] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 759.608839] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.609141] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 759.609405] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 759.609580] 
env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 759.609753] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 759.609915] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 759.610130] env[61898]: DEBUG nova.virt.hardware [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 759.611094] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52632fc0-6104-47b2-9e58-96422c740365 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.619332] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec707ba3-7d9c-4d19-b434-94b5e36ba28a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.715644] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240490, 'name': ReconfigVM_Task, 'duration_secs': 0.724461} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.715918] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 4c744673-0d9b-44ef-938f-372b101a2053/4c744673-0d9b-44ef-938f-372b101a2053.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.716651] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2be97d67-c2ab-4244-baa6-344e551bb09d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.724321] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 759.724321] env[61898]: value = "task-1240496" [ 759.724321] env[61898]: _type = "Task" [ 759.724321] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.734991] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240496, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.846129] env[61898]: DEBUG nova.compute.manager [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Received event network-changed-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 759.846129] env[61898]: DEBUG nova.compute.manager [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Refreshing instance network info cache due to event network-changed-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 759.846129] env[61898]: DEBUG oslo_concurrency.lockutils [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] Acquiring lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.846129] env[61898]: DEBUG oslo_concurrency.lockutils [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] Acquired lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.846816] env[61898]: DEBUG nova.network.neutron [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Refreshing network info cache for port ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.950220] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240494, 'name': CreateVM_Task, 'duration_secs': 0.393702} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.953835] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.953835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.953835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.953835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 759.953835] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5e479c-72a0-42ac-8277-6e2302271e33 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.956438] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 759.956438] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c5996a-e235-5798-ad31-24100dd9e310" [ 759.956438] env[61898]: _type = "Task" [ 759.956438] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.963987] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c5996a-e235-5798-ad31-24100dd9e310, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.997079] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.997825] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 760.000739] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.163s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.006041] env[61898]: INFO nova.compute.claims [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 760.046744] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064393} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.047191] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 760.048064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63eb360c-e0a6-46f3-ab3c-b17f308039d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.071435] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 34338563-05d4-477b-8480-6ef4cbf28e72/34338563-05d4-477b-8480-6ef4cbf28e72.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 760.072278] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8115159e-7de4-4ee8-bf87-71bb49daa3cc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.093051] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 760.093051] env[61898]: value = "task-1240497" [ 760.093051] env[61898]: _type = "Task" [ 760.093051] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.101794] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240497, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.234047] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240496, 'name': Rename_Task, 'duration_secs': 0.173446} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.234047] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.234047] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed8aa289-6e2e-4968-9b9d-c369e3bd0e46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.239778] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 760.239778] env[61898]: value = "task-1240498" [ 760.239778] env[61898]: _type = "Task" [ 760.239778] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.247306] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240498, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.470098] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c5996a-e235-5798-ad31-24100dd9e310, 'name': SearchDatastore_Task, 'duration_secs': 0.008971} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.470555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.470899] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.471270] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.471511] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.471778] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.472122] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61dd74c9-e28c-422a-8916-06f581985240 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.483250] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.483250] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 760.483250] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acb35ee4-e5d2-4bcf-8b4c-5d96cbab7fab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.488843] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 760.488843] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b4459f-6db3-467a-9b03-f4d94a0347b1" [ 760.488843] env[61898]: _type = "Task" [ 760.488843] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.497440] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b4459f-6db3-467a-9b03-f4d94a0347b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.511106] env[61898]: DEBUG nova.compute.utils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.512749] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 760.513129] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 760.605823] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240497, 'name': ReconfigVM_Task, 'duration_secs': 0.248173} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.605823] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 34338563-05d4-477b-8480-6ef4cbf28e72/34338563-05d4-477b-8480-6ef4cbf28e72.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.605823] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b944f3bb-3436-4208-87f4-9a9c447584d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.613019] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 760.613019] env[61898]: value = "task-1240499" [ 760.613019] env[61898]: _type = "Task" [ 760.613019] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.619055] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240499, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.706604] env[61898]: DEBUG nova.policy [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88a5606350204fcfbba29e2d90e90e2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11539a8a92af4208a15e69afe3dc60e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 760.749883] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240498, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.792305] env[61898]: DEBUG nova.network.neutron [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updated VIF entry in instance network info cache for port ddb06f4c-13ed-4322-b1e8-f4022b32e4f4. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 760.792662] env[61898]: DEBUG nova.network.neutron [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updating instance_info_cache with network_info: [{"id": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "address": "fa:16:3e:d2:5d:13", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb06f4c-13", "ovs_interfaceid": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.987325] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Successfully created port: 53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.003937] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b4459f-6db3-467a-9b03-f4d94a0347b1, 'name': SearchDatastore_Task, 'duration_secs': 0.007848} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.004880] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc2a641e-75f9-47cd-9cf0-6ffa182ef917 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.010217] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 761.010217] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529c5caa-fa08-4cf2-7a8d-32147ef733c4" [ 761.010217] env[61898]: _type = "Task" [ 761.010217] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.013733] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 761.023490] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529c5caa-fa08-4cf2-7a8d-32147ef733c4, 'name': SearchDatastore_Task, 'duration_secs': 0.008684} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.023724] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.023966] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 761.024469] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60958c77-ef0d-4e1e-95ee-49727fdf6af4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.030173] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 761.030173] env[61898]: value = "task-1240500" [ 761.030173] env[61898]: _type = "Task" [ 761.030173] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.037608] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240500, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.121655] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240499, 'name': Rename_Task, 'duration_secs': 0.137913} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.121916] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 761.122509] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b705a36f-ef3d-4650-ab8f-9e0c54680a0b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.130545] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 761.130545] env[61898]: value = "task-1240501" [ 761.130545] env[61898]: _type = "Task" [ 761.130545] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.138196] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.151543] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Successfully updated port: c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.251700] env[61898]: DEBUG oslo_vmware.api [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240498, 'name': PowerOnVM_Task, 'duration_secs': 0.99584} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.255029] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 761.255029] env[61898]: INFO nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Took 9.51 seconds to spawn the instance on the hypervisor. 
[ 761.255029] env[61898]: DEBUG nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 761.256764] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d4a1a6-7bc9-4c2e-9e3d-f9a0838854d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.295380] env[61898]: DEBUG oslo_concurrency.lockutils [req-5bbb5afa-8b8c-4b6f-bee0-c685fe6fa4dd req-c7608e8d-6234-4ab5-a17f-7bd2548542ec service nova] Releasing lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.352969] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d989b4-f5a9-496c-b8a0-de50a18b89c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.361137] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05824a51-fcd5-42ea-bc3c-1e1d11539d9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.395042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6518675b-347e-49c3-8cb6-cbfb54d89a38 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.403339] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f45c925-eca6-4155-9cf6-08cdc0204440 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.418334] env[61898]: DEBUG nova.compute.provider_tree [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.540688] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240500, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487007} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.540936] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.541216] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.541969] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe094419-d176-4175-abbe-7435e82f9c74 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.547158] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 761.547158] env[61898]: value = "task-1240502" [ 761.547158] env[61898]: _type = "Task" [ 761.547158] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.554664] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240502, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.640091] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240501, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.652817] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.652966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.653137] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.783259] env[61898]: INFO nova.compute.manager [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Took 29.16 seconds to build instance. [ 761.922176] env[61898]: DEBUG nova.scheduler.client.report [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 762.027031] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 762.050247] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Received event network-changed-a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 762.050247] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Refreshing instance network info cache due to event network-changed-a9c5b33c-5075-4ced-8700-0ca1e0071262. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 762.050247] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.050247] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.050247] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Refreshing network info cache for port a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.055407] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.055653] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.055813] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.056297] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.056468] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.056750] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 
tempest-ServersNegativeTestJSON-996594428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.056840] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.056975] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.057158] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.057319] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.057486] env[61898]: DEBUG nova.virt.hardware [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.058683] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e93bac1-ad4e-4fd8-a7c3-a2569b0b3978 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.071146] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee9ed9a-e1ca-4810-8464-8142f7d4e116 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.075082] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240502, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056646} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.075708] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 762.076727] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6321d8-24bd-4aeb-ab2a-a79364dfdb5d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.105806] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 762.106348] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b709eda9-352d-437c-b3e3-7b32eaeee9e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.125811] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 762.125811] env[61898]: value = "task-1240503" [ 762.125811] env[61898]: _type = "Task" [ 762.125811] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.133566] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240503, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.141170] env[61898]: DEBUG oslo_vmware.api [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240501, 'name': PowerOnVM_Task, 'duration_secs': 0.689212} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.141445] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.141662] env[61898]: INFO nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Took 7.93 seconds to spawn the instance on the hypervisor. 
[ 762.141841] env[61898]: DEBUG nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 762.142577] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992c82ea-1aa0-4eee-985d-975dc2e7df81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.215768] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 762.287142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4e00cbd4-3c52-46b5-84dd-c5eadcb84d8f tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 144.547s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.427682] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.428295] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 762.430960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.774s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.432428] env[61898]: INFO nova.compute.claims [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.531016] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Successfully updated port: 53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.611964] env[61898]: DEBUG nova.compute.manager [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 762.612178] env[61898]: DEBUG oslo_concurrency.lockutils [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.612384] env[61898]: DEBUG oslo_concurrency.lockutils [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.612540] env[61898]: DEBUG oslo_concurrency.lockutils [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.616970] env[61898]: DEBUG nova.compute.manager [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] No waiting events found dispatching network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 762.616970] env[61898]: WARNING nova.compute.manager [req-29f3be80-6c8c-4ad9-ace2-f4ac4552925a req-872300b2-5c2a-414c-ae1f-c8430740c10a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received unexpected event network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a for instance with vm_state building and task_state spawning. 
[ 762.642210] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240503, 'name': ReconfigVM_Task, 'duration_secs': 0.337102} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.642460] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.643143] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a855083-7fdf-4530-b968-80d963573ef9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.647442] env[61898]: DEBUG nova.network.neutron [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Updating instance_info_cache with network_info: [{"id": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "address": "fa:16:3e:6f:4b:fb", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2aa5537-be", "ovs_interfaceid": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.649571] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 762.649571] env[61898]: value = "task-1240504" [ 762.649571] env[61898]: _type = "Task" [ 762.649571] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.664173] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240504, 'name': Rename_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.664631] env[61898]: INFO nova.compute.manager [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Took 23.27 seconds to build instance. [ 762.790772] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 762.813925] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updated VIF entry in instance network info cache for port a9c5b33c-5075-4ced-8700-0ca1e0071262. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.814304] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.936825] env[61898]: DEBUG nova.compute.utils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 762.942682] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 762.943067] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.987138] env[61898]: DEBUG nova.policy [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 763.037766] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.037766] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.037766] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.158212] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.158212] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Instance network_info: |[{"id": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "address": "fa:16:3e:6f:4b:fb", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2aa5537-be", "ovs_interfaceid": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 763.158387] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:4b:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2aa5537-be01-417c-8f48-83f2f9a04ff4', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.164850] env[61898]: DEBUG oslo.service.loopingcall [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.165536] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.165755] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a59d70a-68c3-4a65-a521-d240f1275af6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.190656] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ac1c4eb4-73f9-4eb0-a3cb-20f960e1b063 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.628s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.191281] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240504, 'name': Rename_Task, 'duration_secs': 0.152392} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.193599] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.193871] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f02f608-f187-46c7-b773-50190e19ceb8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.205021] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.205021] env[61898]: value = "task-1240505" [ 763.205021] env[61898]: _type = "Task" [ 763.205021] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.205021] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 763.205021] env[61898]: value = "task-1240506" [ 763.205021] env[61898]: _type = "Task" [ 763.205021] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.216019] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240505, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.219141] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240506, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.273086] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Successfully created port: 92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.316929] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.317288] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Received event network-vif-plugged-c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 763.318030] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Acquiring lock "aab10d8f-0d25-4351-a627-7222be63895e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.318030] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Lock "aab10d8f-0d25-4351-a627-7222be63895e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.318030] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Lock "aab10d8f-0d25-4351-a627-7222be63895e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.318219] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] No waiting events found dispatching network-vif-plugged-c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 763.318320] env[61898]: WARNING nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Received unexpected event network-vif-plugged-c2aa5537-be01-417c-8f48-83f2f9a04ff4 for instance with vm_state building and task_state spawning. 
[ 763.318554] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Received event network-changed-c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 763.318780] env[61898]: DEBUG nova.compute.manager [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Refreshing instance network info cache due to event network-changed-c2aa5537-be01-417c-8f48-83f2f9a04ff4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 763.318952] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Acquiring lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.319123] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Acquired lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.319279] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Refreshing network info cache for port c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.402273] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.443283] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 763.469848] env[61898]: DEBUG nova.scheduler.client.report [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 763.485602] env[61898]: DEBUG nova.scheduler.client.report [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 763.485602] env[61898]: DEBUG nova.compute.provider_tree [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 763.498840] env[61898]: DEBUG nova.scheduler.client.report [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 763.518326] env[61898]: DEBUG nova.scheduler.client.report [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 763.579294] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.673845] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "626caecc-6389-4064-aafd-9968cee262ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.674133] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.698194] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 763.718720] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240505, 'name': CreateVM_Task, 'duration_secs': 0.353115} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.725545] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 763.726162] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240506, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.727078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.727078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.727433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 763.727736] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edfd4095-1338-4a3c-acb8-ce4b8384b6c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.733029] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 763.733029] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5295a02e-a10a-19ea-2d3e-626cc6d9c731" [ 763.733029] env[61898]: _type = "Task" [ 763.733029] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.746489] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5295a02e-a10a-19ea-2d3e-626cc6d9c731, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.808144] env[61898]: DEBUG nova.network.neutron [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.868134] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddbc8b6-9c18-4727-bee8-e1748bffd776 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.878169] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf351bea-8e46-4814-854e-af949d0b18f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.909121] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8122c235-1bb1-4e4d-adf8-a5466f7dad66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.917048] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8846b559-da5a-44d3-b8c4-b04ba6d0dc5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.930990] env[61898]: DEBUG nova.compute.provider_tree [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.118096] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Updated VIF entry in instance network info cache for port c2aa5537-be01-417c-8f48-83f2f9a04ff4. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.118648] env[61898]: DEBUG nova.network.neutron [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Updating instance_info_cache with network_info: [{"id": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "address": "fa:16:3e:6f:4b:fb", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2aa5537-be", "ovs_interfaceid": "c2aa5537-be01-417c-8f48-83f2f9a04ff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.217467] env[61898]: DEBUG oslo_vmware.api [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240506, 'name': PowerOnVM_Task, 'duration_secs': 0.56} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.218038] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.218038] env[61898]: INFO nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Took 7.10 seconds to spawn the instance on the hypervisor. 
[ 764.218213] env[61898]: DEBUG nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 764.218986] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899875ba-daef-4414-8df8-e8db97254024 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.222327] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.242637] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5295a02e-a10a-19ea-2d3e-626cc6d9c731, 'name': SearchDatastore_Task, 'duration_secs': 0.010704} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.243129] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.243233] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.243460] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.243657] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.244666] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.244666] env[61898]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-df04e5d7-a5bc-462f-8fe2-e77a27d0cf6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.256515] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.257203] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.257466] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075c11f2-99b5-44c6-9234-123be37e3610 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.270464] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 764.270464] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528a8dc9-cdfe-8719-5beb-fd535866fadd" [ 764.270464] env[61898]: _type = "Task" [ 764.270464] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.281286] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528a8dc9-cdfe-8719-5beb-fd535866fadd, 'name': SearchDatastore_Task, 'duration_secs': 0.012037} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.285024] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0319353-53d9-4b5c-b094-9dd805291e9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.288278] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 764.288278] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e74650-2a57-6c79-1fb0-91ae7bc2abf0" [ 764.288278] env[61898]: _type = "Task" [ 764.288278] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.298446] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e74650-2a57-6c79-1fb0-91ae7bc2abf0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.313613] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.313995] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance network_info: |[{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 764.314432] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:2b:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53a6375d-a9c3-4c2e-8568-942c3c43bf4a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.322278] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating folder: Project (11539a8a92af4208a15e69afe3dc60e8). Parent ref: group-v267550. 
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.322731] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98685b22-3535-4fdb-8ecb-3faca7d1adc6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.333489] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created folder: Project (11539a8a92af4208a15e69afe3dc60e8) in parent group-v267550. [ 764.333680] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating folder: Instances. Parent ref: group-v267597. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.333960] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fd4436d-67f1-486d-b103-4b14f7ad95f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.343153] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created folder: Instances in parent group-v267597. [ 764.343390] env[61898]: DEBUG oslo.service.loopingcall [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.343579] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.343779] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b646435-b89b-4b05-9ffb-f80e9e171187 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.364131] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.364131] env[61898]: value = "task-1240509" [ 764.364131] env[61898]: _type = "Task" [ 764.364131] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.371822] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240509, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.436179] env[61898]: DEBUG nova.scheduler.client.report [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 764.453312] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 764.480664] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.480931] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.481096] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.481307] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.481457] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.481603] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad 
tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.481806] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.481976] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.482167] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.482331] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.482498] env[61898]: DEBUG nova.virt.hardware [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.483384] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6073d4bb-c6a8-45f0-8e04-b9fff194ef29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.491798] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7982142e-f264-4379-8d9d-750e6f175d43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.622249] env[61898]: DEBUG oslo_concurrency.lockutils [req-d54d803e-7961-4821-bbf0-55f898b0b4a9 req-d63337c0-dd73-4cbe-9fc5-038a81f621cd service nova] Releasing lock "refresh_cache-aab10d8f-0d25-4351-a627-7222be63895e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.640226] env[61898]: DEBUG nova.compute.manager [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 764.640537] env[61898]: DEBUG nova.compute.manager [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing instance network info cache due to event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 764.640794] env[61898]: DEBUG oslo_concurrency.lockutils [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.640945] env[61898]: DEBUG oslo_concurrency.lockutils [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.641126] env[61898]: DEBUG nova.network.neutron [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.738121] env[61898]: INFO nova.compute.manager [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Took 23.43 seconds to build instance. [ 764.800908] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e74650-2a57-6c79-1fb0-91ae7bc2abf0, 'name': SearchDatastore_Task, 'duration_secs': 0.009404} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.801797] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Successfully updated port: 92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 764.804263] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.804263] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] aab10d8f-0d25-4351-a627-7222be63895e/aab10d8f-0d25-4351-a627-7222be63895e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 764.804263] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75ae1438-0831-4540-9432-8d62e25ae2df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.810419] env[61898]: DEBUG oslo_vmware.api [None 
req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 764.810419] env[61898]: value = "task-1240510" [ 764.810419] env[61898]: _type = "Task" [ 764.810419] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.819669] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.873895] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240509, 'name': CreateVM_Task, 'duration_secs': 0.448506} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.874091] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.874784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.874934] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.875275] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 764.875527] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5515d2a8-71af-421a-89f3-409f7b38b24d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.879941] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 764.879941] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521833e8-6077-e8d8-6752-ddabe10b9811" [ 764.879941] env[61898]: _type = "Task" [ 764.879941] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.887863] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521833e8-6077-e8d8-6752-ddabe10b9811, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.941655] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.942171] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 764.944883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.273s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.946352] env[61898]: INFO nova.compute.claims [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.241259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-48c70320-4e95-4225-be1d-f05133b49816 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.309s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.307320] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.307538] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.307696] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Building network 
info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.320860] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240510, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.369286] env[61898]: DEBUG nova.network.neutron [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updated VIF entry in instance network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.369644] env[61898]: DEBUG nova.network.neutron [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.391875] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521833e8-6077-e8d8-6752-ddabe10b9811, 'name': SearchDatastore_Task, 'duration_secs': 0.022493} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.392170] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.392406] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.392709] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.393500] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.393500] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.393500] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19d24c88-ac90-46a1-a2f2-be425d94695e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.407156] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.407496] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.408468] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-892200b5-583e-4776-b6cc-7ef2e9571d94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.414163] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 765.414163] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529713a7-30fd-169c-d640-4744dffc3323" [ 765.414163] env[61898]: _type = "Task" [ 765.414163] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.424234] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529713a7-30fd-169c-d640-4744dffc3323, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.451492] env[61898]: DEBUG nova.compute.utils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 765.455392] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 765.455616] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.501100] env[61898]: DEBUG nova.policy [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2339754ca62041c5b4dba2ebb1b38726', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d19464641434cc7a06ca4eed2d5bf3c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 765.748018] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 765.787233] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Successfully created port: f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.822197] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717931} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.822530] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] aab10d8f-0d25-4351-a627-7222be63895e/aab10d8f-0d25-4351-a627-7222be63895e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.822784] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.823318] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d79b651-c487-4c07-b84e-6243d2a7fd12 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.829548] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 765.829548] env[61898]: value = "task-1240511" [ 765.829548] env[61898]: _type = "Task" [ 765.829548] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.837232] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240511, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.855870] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "34338563-05d4-477b-8480-6ef4cbf28e72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.856179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.856386] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.856569] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.856766] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.859371] env[61898]: INFO nova.compute.manager [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Terminating instance [ 765.867570] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.871917] env[61898]: DEBUG oslo_concurrency.lockutils [req-b0850957-1202-4e4f-89dd-cb1c6abe0cb4 req-a4001912-6e55-4d82-bb12-3d52a0246a62 service nova] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.927189] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529713a7-30fd-169c-d640-4744dffc3323, 'name': SearchDatastore_Task, 'duration_secs': 0.043192} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.927983] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1a3c37f-1b00-4dc7-9f7a-48efabbaefb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.933259] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 765.933259] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5299884b-98e5-12fb-99fc-0f468f318c82" [ 765.933259] env[61898]: _type = "Task" [ 765.933259] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.945603] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5299884b-98e5-12fb-99fc-0f468f318c82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.958904] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 766.136330] env[61898]: DEBUG nova.network.neutron [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Updating instance_info_cache with network_info: [{"id": "92b514d1-73d5-449b-8f17-dd283c2d7014", "address": "fa:16:3e:39:28:6e", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b514d1-73", "ovs_interfaceid": "92b514d1-73d5-449b-8f17-dd283c2d7014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.266094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.269194] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a123e39a-1322-46fd-aab1-2a8d00d5e70f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.276789] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0103b08e-23ec-43a9-8b68-c3df8d234c92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.312461] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52341a7-7d29-4b9c-a79b-48f4c4c445f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.320112] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c05db88-44ef-46d8-bbbc-04fb8bb93ff6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.335857] env[61898]: DEBUG nova.compute.provider_tree [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.345618] env[61898]: DEBUG 
oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240511, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202048} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.346550] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 766.347365] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfde2bcb-86d4-402c-ac10-c14d57a537e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.371233] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] aab10d8f-0d25-4351-a627-7222be63895e/aab10d8f-0d25-4351-a627-7222be63895e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.371953] env[61898]: DEBUG nova.compute.manager [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 766.372176] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.372756] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b8104f9-1aa9-48f6-8ed6-b918d1bca4dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.388439] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24048c7-35cf-40cf-8b66-185711a313a8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.397068] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 766.401199] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7661a1c-f073-476d-8272-e4a530949e9f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.401199] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 766.401199] env[61898]: value = "task-1240512" [ 766.401199] env[61898]: _type = "Task" [ 766.401199] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.404238] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 766.404238] env[61898]: value = "task-1240513" [ 766.404238] env[61898]: _type = "Task" [ 766.404238] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.410723] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240512, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.415491] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240513, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.443475] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5299884b-98e5-12fb-99fc-0f468f318c82, 'name': SearchDatastore_Task, 'duration_secs': 0.011034} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.443750] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.444044] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.444352] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bae83b0-8cf1-4928-bd05-90ab91b6c6b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.454398] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 766.454398] env[61898]: value = "task-1240514" [ 766.454398] env[61898]: _type = "Task" [ 766.454398] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.464218] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240514, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.466976] env[61898]: INFO nova.virt.block_device [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Booting with volume 5e4f66be-193a-428e-ae80-03e6b55967d5 at /dev/sda [ 766.513676] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a28f6e78-fd24-4aef-a001-c1b029bdc998 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.522458] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3ddd97-5bcc-40af-b213-909a5da22cae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.554944] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce07b488-781a-4c24-844f-f77283894596 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.561565] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f623a76-cf58-4435-8712-a9d1b4cc1dfe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.590658] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153f71ae-3774-4b6c-bb52-ee22ec4acc4f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.597748] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340a7ce5-be33-42c5-b41f-917fdcb4e4c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.613062] env[61898]: DEBUG nova.virt.block_device [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updating existing volume attachment record: 0aeae21b-4271-4ab3-b124-9889a8dddcec {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 766.638737] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.639108] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Instance network_info: |[{"id": "92b514d1-73d5-449b-8f17-dd283c2d7014", "address": "fa:16:3e:39:28:6e", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b514d1-73", "ovs_interfaceid": "92b514d1-73d5-449b-8f17-dd283c2d7014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 766.639569] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:28:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92b514d1-73d5-449b-8f17-dd283c2d7014', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.649179] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating folder: Project (30bd396aa1ff45ad946bc1a6fdb3b40b). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.649324] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-476bcb53-2d37-4e3a-acba-f0231754d18a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.660335] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created folder: Project (30bd396aa1ff45ad946bc1a6fdb3b40b) in parent group-v267550. [ 766.661774] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating folder: Instances. Parent ref: group-v267600. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 766.661916] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e774386f-1918-4908-8e9c-2c6d01280470 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.672770] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created folder: Instances in parent group-v267600. [ 766.673652] env[61898]: DEBUG oslo.service.loopingcall [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 766.674712] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.675896] env[61898]: DEBUG nova.compute.manager [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Received event network-vif-plugged-92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 766.676149] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Acquiring lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.676409] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.676786] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.676786] env[61898]: DEBUG nova.compute.manager [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] No waiting events found dispatching network-vif-plugged-92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.676898] env[61898]: WARNING nova.compute.manager [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Received unexpected event network-vif-plugged-92b514d1-73d5-449b-8f17-dd283c2d7014 for instance with vm_state building and task_state spawning. [ 766.677071] env[61898]: DEBUG nova.compute.manager [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Received event network-changed-92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 766.677249] env[61898]: DEBUG nova.compute.manager [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Refreshing instance network info cache due to event network-changed-92b514d1-73d5-449b-8f17-dd283c2d7014. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 766.677496] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Acquiring lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.677640] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Acquired lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.677940] env[61898]: DEBUG nova.network.neutron [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Refreshing network info cache for port 92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.679248] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4f30e30-65e0-4faa-952b-af2d62e73532 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.709075] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.709075] env[61898]: value = "task-1240517" [ 766.709075] env[61898]: _type = "Task" [ 766.709075] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.718749] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240517, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.842345] env[61898]: DEBUG nova.scheduler.client.report [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 766.915357] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240512, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.923544] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240513, 'name': PowerOffVM_Task, 'duration_secs': 0.194243} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.923544] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 766.923544] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 766.923544] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c91b696-58e5-4c4e-8b58-55ee65e11657 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.965248] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240514, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.986058] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 766.986233] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 766.986423] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Deleting the datastore file [datastore1] 34338563-05d4-477b-8480-6ef4cbf28e72 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.986681] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afb002f6-8a13-4a27-b1da-38ce45d4025f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.993111] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for the task: (returnval){ [ 766.993111] env[61898]: value = "task-1240519" [ 766.993111] env[61898]: _type = "Task" [ 766.993111] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.001593] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240519, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.016478] env[61898]: DEBUG nova.network.neutron [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Updated VIF entry in instance network info cache for port 92b514d1-73d5-449b-8f17-dd283c2d7014. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 767.016884] env[61898]: DEBUG nova.network.neutron [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Updating instance_info_cache with network_info: [{"id": "92b514d1-73d5-449b-8f17-dd283c2d7014", "address": "fa:16:3e:39:28:6e", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92b514d1-73", "ovs_interfaceid": "92b514d1-73d5-449b-8f17-dd283c2d7014", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.220106] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240517, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.284107] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Successfully updated port: f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.348025] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.348652] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 767.351389] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.435s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.352935] env[61898]: INFO nova.compute.claims [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.410331] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240512, 'name': ReconfigVM_Task, 'duration_secs': 0.518953} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.410648] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Reconfigured VM instance instance-00000038 to attach disk [datastore2] aab10d8f-0d25-4351-a627-7222be63895e/aab10d8f-0d25-4351-a627-7222be63895e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 767.411631] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e3ba63b-0188-4c4d-8d8e-5b1b1553b50b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.417937] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 767.417937] env[61898]: value = "task-1240520" [ 767.417937] env[61898]: _type = "Task" [ 767.417937] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.427308] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240520, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.467082] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538535} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.467591] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.467919] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.468236] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1526dfc-ce84-4a43-a3aa-c0e1649d7031 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.477583] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 767.477583] env[61898]: value = "task-1240521" [ 767.477583] env[61898]: _type = "Task" [ 767.477583] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.486784] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240521, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.503234] env[61898]: DEBUG oslo_vmware.api [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Task: {'id': task-1240519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193014} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.503476] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 767.503651] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 767.503824] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.503993] env[61898]: INFO nova.compute.manager [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Took 1.13 seconds to destroy the instance on the hypervisor. [ 767.504259] env[61898]: DEBUG oslo.service.loopingcall [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.504445] env[61898]: DEBUG nova.compute.manager [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 767.504540] env[61898]: DEBUG nova.network.neutron [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 767.520053] env[61898]: DEBUG oslo_concurrency.lockutils [req-79141f88-0232-4707-bc81-250b37078311 req-6a582128-9bcc-4643-9663-d4f0bde55611 service nova] Releasing lock "refresh_cache-320577e5-f197-4f66-a94f-9b9ba2479325" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.720065] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240517, 'name': CreateVM_Task, 'duration_secs': 0.627987} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.720247] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.722030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.722030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.722030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 767.722030] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9d013c-2250-4810-88d9-7230b11dd479 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.726378] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 767.726378] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5263a09c-e6e4-967a-de76-c3c1fe6c5c8b" [ 767.726378] env[61898]: _type = "Task" [ 767.726378] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.734519] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5263a09c-e6e4-967a-de76-c3c1fe6c5c8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.787259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.787434] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquired lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.787585] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.864240] env[61898]: DEBUG nova.compute.utils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 767.867068] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 767.867309] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 767.909877] env[61898]: DEBUG nova.policy [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3848d73334ff490696e92ac9da3a4a25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '294362a0c6b04039b589ae5eb0d341ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 767.927767] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240520, 'name': Rename_Task, 'duration_secs': 0.154823} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.928144] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.928280] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6523ee2f-ed04-4dbb-83d8-6deb75ef3738 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.934740] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 767.934740] env[61898]: value = "task-1240522" [ 767.934740] env[61898]: _type = "Task" [ 767.934740] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.943437] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240522, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.986721] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240521, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070058} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.986995] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.987849] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cc8a99-bf9c-4199-a419-6e939b710abe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.011340] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.012012] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95f9c57d-0057-4590-b0d9-52641a30a57b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.033425] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 768.033425] env[61898]: value = "task-1240523" [ 768.033425] env[61898]: _type = "Task" [ 768.033425] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.045855] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240523, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.157794] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Successfully created port: f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.239646] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5263a09c-e6e4-967a-de76-c3c1fe6c5c8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009208} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.239969] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.240237] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.240506] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.240884] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.240884] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.241128] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db78451d-fb25-48d0-9a9a-a6998cf38073 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.246647] env[61898]: DEBUG nova.network.neutron [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.250406] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.250601] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.251370] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b3aa3a8-c093-4b91-841a-df8c780a770a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.257337] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 768.257337] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52aae31d-38a5-648e-c180-e44f1e6608fa" [ 768.257337] env[61898]: _type = "Task" [ 768.257337] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.266461] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aae31d-38a5-648e-c180-e44f1e6608fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.355585] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 768.373299] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 768.441195] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Successfully created port: e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.449083] env[61898]: DEBUG oslo_vmware.api [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240522, 'name': PowerOnVM_Task, 'duration_secs': 0.436189} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.449083] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.449326] env[61898]: INFO nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Took 8.87 seconds to spawn the instance on the hypervisor. 
[ 768.450105] env[61898]: DEBUG nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 768.451035] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261bffb5-b73f-4dbf-aabe-9f7e21c64758 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.545847] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240523, 'name': ReconfigVM_Task, 'duration_secs': 0.311827} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.545847] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.545958] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6fda7ea0-3a35-441c-9cb6-aabec8c38680 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.553252] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 768.553252] env[61898]: value = "task-1240524" [ 768.553252] env[61898]: _type = "Task" [ 768.553252] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.568393] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240524, 'name': Rename_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.597341] env[61898]: DEBUG nova.network.neutron [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updating instance_info_cache with network_info: [{"id": "f036aa10-aacf-4943-b51b-28b2693d3448", "address": "fa:16:3e:18:67:05", "network": {"id": "eb434da2-9ae2-4942-bfc8-9fcfafa028e3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2040918802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d19464641434cc7a06ca4eed2d5bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf036aa10-aa", "ovs_interfaceid": "f036aa10-aacf-4943-b51b-28b2693d3448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.700201] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 768.700993] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=<?>,container_format=<?>,created_at=<?>,direct_url=<?>,disk_format=<?>,id=<?>,min_disk=0,min_ram=0,name=<?>,owner=<?>,properties=ImageMetaProps,protected=<?>,size=1073741824,status='active',tags=<?>,updated_at=<?>,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.701259] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.701425] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.701599] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.701743] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.701886] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.702101] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.702261] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.702425] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Got 1 possible 
topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.702584] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.702752] env[61898]: DEBUG nova.virt.hardware [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.703623] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db153c40-1d76-4a9a-b825-ee357812a0ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.716531] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c5dea7-9fab-4d8f-b3e5-9859cf3d4684 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.748969] env[61898]: INFO nova.compute.manager [-] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Took 1.24 seconds to deallocate network for instance. [ 768.757102] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca43578-0ab7-4b6d-a90a-f0017ab1be6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.771595] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aae31d-38a5-648e-c180-e44f1e6608fa, 'name': SearchDatastore_Task, 'duration_secs': 0.008577} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.772588] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab67bdb-485f-48fd-ad44-13a4c6ad3903 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.776311] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1f92b42-af05-4819-a264-bca41aa36c16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.783089] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 768.783089] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a359d6-abd3-8fc8-07c3-e986fa8abfd5" [ 768.783089] env[61898]: _type = "Task" [ 768.783089] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.810738] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bea39d-68db-4fe0-838b-03941fa7f7a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.819182] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a359d6-abd3-8fc8-07c3-e986fa8abfd5, 'name': SearchDatastore_Task, 'duration_secs': 0.009029} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.821334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.822033] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 320577e5-f197-4f66-a94f-9b9ba2479325/320577e5-f197-4f66-a94f-9b9ba2479325.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 768.822033] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9fef3539-07f7-434a-8a48-35cd68e7074b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.826016] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3ca7fa-09a0-4f5e-9e5b-0a06c4a9c147 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.839115] env[61898]: DEBUG nova.compute.provider_tree [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.841492] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 768.841492] env[61898]: value = "task-1240525" [ 768.841492] env[61898]: _type = "Task" [ 768.841492] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.849544] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240525, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.903026] env[61898]: DEBUG nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Received event network-vif-plugged-f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 768.903026] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Acquiring lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.903199] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.903321] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.903502] env[61898]: DEBUG nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] No waiting events found dispatching network-vif-plugged-f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 768.903667] env[61898]: WARNING nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Received unexpected event network-vif-plugged-f036aa10-aacf-4943-b51b-28b2693d3448 for instance with vm_state building and task_state spawning. [ 768.903833] env[61898]: DEBUG nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Received event network-changed-f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 768.903963] env[61898]: DEBUG nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Refreshing instance network info cache due to event network-changed-f036aa10-aacf-4943-b51b-28b2693d3448. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 768.904141] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Acquiring lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.975766] env[61898]: INFO nova.compute.manager [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Took 26.75 seconds to build instance. [ 769.064070] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240524, 'name': Rename_Task, 'duration_secs': 0.141394} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.064365] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 769.064614] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0458a748-6f08-49d9-a2e1-305c4596d9b1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.073286] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 769.073286] env[61898]: value = "task-1240526" [ 769.073286] env[61898]: _type = "Task" [ 769.073286] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.083309] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240526, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.100037] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Releasing lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.100550] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance network_info: |[{"id": "f036aa10-aacf-4943-b51b-28b2693d3448", "address": "fa:16:3e:18:67:05", "network": {"id": "eb434da2-9ae2-4942-bfc8-9fcfafa028e3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2040918802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d19464641434cc7a06ca4eed2d5bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf036aa10-aa", "ovs_interfaceid": "f036aa10-aacf-4943-b51b-28b2693d3448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 769.100955] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Acquired lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.101190] env[61898]: DEBUG nova.network.neutron [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Refreshing network info cache for port f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 769.102794] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:67:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db00ec2e-3155-46b6-8170-082f7d86dbe7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f036aa10-aacf-4943-b51b-28b2693d3448', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.111210] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 
tempest-ServerActionsV293TestJSON-828892759-project-member] Creating folder: Project (8d19464641434cc7a06ca4eed2d5bf3c). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.112554] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31755ac7-df4b-43d6-bdad-211c13dcfef3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.127303] env[61898]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 769.127508] env[61898]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61898) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 769.127900] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Folder already exists: Project (8d19464641434cc7a06ca4eed2d5bf3c). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 769.128144] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Creating folder: Instances. Parent ref: group-v267572. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.128398] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b43a69f6-80b2-42a4-a10a-f8e79db590f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.138653] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Created folder: Instances in parent group-v267572. [ 769.138927] env[61898]: DEBUG oslo.service.loopingcall [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 769.139143] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.139387] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93eb0b77-6078-4c13-a718-096c6eeb00e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.161306] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.161306] env[61898]: value = "task-1240529" [ 769.161306] env[61898]: _type = "Task" [ 769.161306] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.172877] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240529, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.260628] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.342962] env[61898]: DEBUG nova.scheduler.client.report [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 769.355793] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240525, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458343} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.356119] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 320577e5-f197-4f66-a94f-9b9ba2479325/320577e5-f197-4f66-a94f-9b9ba2479325.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.356363] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.357192] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f13453d-a671-4ea0-bb1e-7a8d4cf95baf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.364718] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 769.364718] env[61898]: value = "task-1240530" [ 769.364718] env[61898]: _type = "Task" [ 769.364718] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.373569] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240530, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.384351] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 769.410504] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 769.410779] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 769.410937] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.411138] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 769.411282] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.411492] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 769.411642] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 769.411804] env[61898]: 
DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 769.411991] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 769.412156] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 769.412315] env[61898]: DEBUG nova.virt.hardware [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 769.413246] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976c5f10-7d71-4f52-9830-354feed31a0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.421850] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f773710c-bc6e-4555-9865-003cbd60c885 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.480227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fe0ce046-9a30-46fd-b12b-c24989d34183 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.679s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.583115] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240526, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.673198] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240529, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.850831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.851364] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 769.854728] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.593s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.877251] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066756} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.877512] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.878298] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8e9bbf-db46-4735-b1e5-ea6f94aa7dcf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.901771] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 320577e5-f197-4f66-a94f-9b9ba2479325/320577e5-f197-4f66-a94f-9b9ba2479325.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.903855] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ec1eac5-b0b0-4c04-9b00-96d88547db4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.929585] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 769.929585] env[61898]: value = "task-1240531" [ 769.929585] env[61898]: _type = "Task" [ 769.929585] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.940782] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240531, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.979336] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Successfully updated port: f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.983200] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 769.997438] env[61898]: DEBUG nova.network.neutron [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updated VIF entry in instance network info cache for port f036aa10-aacf-4943-b51b-28b2693d3448. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.997933] env[61898]: DEBUG nova.network.neutron [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updating instance_info_cache with network_info: [{"id": "f036aa10-aacf-4943-b51b-28b2693d3448", "address": "fa:16:3e:18:67:05", "network": {"id": "eb434da2-9ae2-4942-bfc8-9fcfafa028e3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2040918802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d19464641434cc7a06ca4eed2d5bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf036aa10-aa", "ovs_interfaceid": "f036aa10-aacf-4943-b51b-28b2693d3448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.084815] env[61898]: DEBUG oslo_vmware.api [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240526, 'name': PowerOnVM_Task, 'duration_secs': 0.695878} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.085138] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.085409] env[61898]: INFO nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Took 8.06 seconds to spawn the instance on the hypervisor. [ 770.085565] env[61898]: DEBUG nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 770.086365] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b66554-eb6f-4c9f-943f-e421828e3b6b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.173240] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240529, 'name': CreateVM_Task, 'duration_secs': 0.692423} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.173486] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.174611] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267575', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'name': 'volume-5e4f66be-193a-428e-ae80-03e6b55967d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '466cbf07-e945-48d4-a103-5a3ea2b7adf6', 'attached_at': '', 'detached_at': '', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'serial': '5e4f66be-193a-428e-ae80-03e6b55967d5'}, 'disk_bus': None, 'boot_index': 0, 'attachment_id': '0aeae21b-4271-4ab3-b124-9889a8dddcec', 'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=61898) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 770.174877] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Root volume attach. 
Driver type: vmdk {{(pid=61898) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 770.175693] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca25df9-5339-4682-a13d-f92c8d9dc390 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.186600] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e05bf7-e039-46c4-b2c6-397edfef6b5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.193973] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee581eb-7c73-4b15-a54c-eec6417b642d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.201702] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d9f0acc2-4f72-4965-8a91-4920b66a66a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.218654] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 770.218654] env[61898]: value = "task-1240532" [ 770.218654] env[61898]: _type = "Task" [ 770.218654] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.227543] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.358696] env[61898]: DEBUG nova.compute.utils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 770.365969] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Not allocating networking since 'none' was specified. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1983}} [ 770.441358] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240531, 'name': ReconfigVM_Task, 'duration_secs': 0.346905} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.441358] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 320577e5-f197-4f66-a94f-9b9ba2479325/320577e5-f197-4f66-a94f-9b9ba2479325.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.442745] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46d3120d-7462-4863-a397-44ac7bdc8b47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.465566] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 770.465566] env[61898]: value = "task-1240533" [ 770.465566] env[61898]: _type = "Task" [ 770.465566] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.501074] env[61898]: DEBUG oslo_concurrency.lockutils [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] Releasing lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.501400] env[61898]: DEBUG nova.compute.manager [req-50f79d0f-bf0d-405c-ab0b-6ec3baf39bc0 req-23cc012e-f2fa-4c47-a4d5-8c02128250d2 service nova] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Received event network-vif-deleted-4a4f6aae-310b-4c24-8d85-e7b3058aa170 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 770.502557] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.605271] env[61898]: INFO nova.compute.manager [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Took 27.43 seconds to build instance. [ 770.732770] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 34%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.867317] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 770.889806] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 9e6a3749-1974-4818-9cc6-76367d41b7e5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 770.890054] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 29eadea9-fa85-4f51-97d0-a941e1658094 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.890175] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance a0580308-d25b-47cb-9c1c-adb763be7925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.890401] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1fb4535d-47d8-45c5-b6d6-d05e57237b98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.890505] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4c744673-0d9b-44ef-938f-372b101a2053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.890660] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 34338563-05d4-477b-8480-6ef4cbf28e72 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 770.890817] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1aa03975-f18f-4e64-836e-e991b73ee9d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.890953] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance aab10d8f-0d25-4351-a627-7222be63895e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.891264] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 45b8dc91-b577-4548-bf3a-32c7c936c616 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.891443] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 320577e5-f197-4f66-a94f-9b9ba2479325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.891611] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.891730] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 5b51a1a5-7d54-4063-b680-e8b8b39fc46a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.891840] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 770.933051] env[61898]: DEBUG nova.compute.manager [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-vif-plugged-f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 770.933295] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Acquiring lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.933863] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.933863] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.933863] env[61898]: DEBUG nova.compute.manager [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] No waiting events found dispatching network-vif-plugged-f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.934117] env[61898]: WARNING nova.compute.manager [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received unexpected event network-vif-plugged-f605f253-f39d-4483-9ab2-634f7db2382b for instance with vm_state building and task_state spawning. [ 770.934348] env[61898]: DEBUG nova.compute.manager [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-changed-f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 770.934348] env[61898]: DEBUG nova.compute.manager [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Refreshing instance network info cache due to event network-changed-f605f253-f39d-4483-9ab2-634f7db2382b. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 770.934597] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Acquiring lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.934705] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Acquired lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.935095] env[61898]: DEBUG nova.network.neutron [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Refreshing network info cache for port f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 770.980474] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240533, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.003517] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "86367a82-239b-4f6e-b306-d9661eadf95e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.003856] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.107800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c7e180db-bc26-4d17-b272-24a7aab53c57 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 112.044s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.237067] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 43%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.395656] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance b106ab9e-08d4-4d18-90e0-13a071c9efb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.475879] env[61898]: DEBUG nova.network.neutron [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.483281] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240533, 'name': Rename_Task, 'duration_secs': 0.579738} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.483559] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 771.483817] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0638fa9-374b-4ef1-9855-7ff24ceb2480 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.496064] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 771.496064] env[61898]: value = "task-1240534" [ 771.496064] env[61898]: _type = "Task" [ 771.496064] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.509376] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.588744] env[61898]: DEBUG nova.network.neutron [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.611237] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 771.742512] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 56%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.880543] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 771.901271] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance e851d73d-58f0-486a-a95c-70d07e5faad2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.923268] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 771.923575] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 771.923760] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 771.923962] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 771.924124] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 771.924274] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 771.924487] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 771.924708] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 771.924963] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 771.925259] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 771.925346] env[61898]: DEBUG nova.virt.hardware [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.926348] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fada576d-2da2-481a-983f-1d93b5383c48 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.938915] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6353bec9-f938-494b-9bd9-51ed4f5adf8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.959090] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.962905] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Creating folder: Project (3d3c06f7aca3448992fc628810ae2774). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.963340] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86ce6b69-c461-467a-9ce5-43aa5ad1696f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.975412] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Created folder: Project (3d3c06f7aca3448992fc628810ae2774) in parent group-v267550. 
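The Rename_Task, PowerOnVM_Task and CreateVM_Task entries around this point all follow the same oslo.vmware pattern: the driver invokes a vSphere SOAP method through the API session, gets back a Task moniker, and wait_for_task() polls it until completion, which is what produces the recurring "Waiting for the task" and "progress is N%" lines. A minimal sketch of that pattern, outside of Nova, is below; the vCenter host, credentials and managed-object ID are illustrative placeholders, not values from this log.

# Hedged sketch of the invoke/poll pattern behind the task-1240533/1240534/1240537
# entries above. Host, credentials and the moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',        # placeholder vCenter host
    'svc-user', 'secret',         # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)       # polling cadence behind the "progress is N%" lines

# Placeholder managed-object reference for a VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Kick off the asynchronous vCenter task ("Invoking VirtualMachine.PowerOnVM_Task").
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Poll until the task completes; oslo.vmware logs the intermediate progress
# the same way the task polling entries in this log do.
session.wait_for_task(task)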
[ 771.975687] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Creating folder: Instances. Parent ref: group-v267605. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.975855] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b67fa6a-5b34-4533-892a-ee605488de58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.990032] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Created folder: Instances in parent group-v267605. [ 771.990538] env[61898]: DEBUG oslo.service.loopingcall [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 771.990861] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.991187] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23cfc562-9d4f-40ce-b30f-49ffc77f78d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.020177] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240534, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.021320] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.021320] env[61898]: value = "task-1240537" [ 772.021320] env[61898]: _type = "Task" [ 772.021320] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.036167] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240537, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.093013] env[61898]: DEBUG oslo_concurrency.lockutils [req-de1dc190-b2a4-4797-9c1b-4cb7d214d3a8 req-d4521492-17b8-44f3-b3ff-57d3cae346a2 service nova] Releasing lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 772.140327] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.199249] env[61898]: DEBUG nova.compute.manager [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-vif-plugged-e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 772.199471] env[61898]: DEBUG oslo_concurrency.lockutils [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] Acquiring lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.199681] env[61898]: DEBUG oslo_concurrency.lockutils [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.199853] env[61898]: DEBUG oslo_concurrency.lockutils [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.200720] env[61898]: DEBUG nova.compute.manager [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] No waiting events found dispatching network-vif-plugged-e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.201051] env[61898]: WARNING nova.compute.manager [req-ab0c2d27-8cc6-42ee-8153-edf7be77c48b req-8273d3d2-bae1-4952-9308-f3062ecd9b0a service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received unexpected event network-vif-plugged-e15583c8-0ee4-4cbf-bf23-b74d594430fb for instance with vm_state building and task_state spawning. [ 772.239918] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 71%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.410285] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 52a584e1-61ae-447d-90e0-e15d32a96314 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.415028] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Successfully updated port: e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 772.519711] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240534, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.530150] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "e19e820c-154d-4e91-8631-dab9439d11a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.530435] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.539213] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240537, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.736060] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 84%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.917801] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7c6aad92-6e91-48fc-89ae-5ee4c89f449c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.919565] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.919869] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.919869] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.022580] env[61898]: DEBUG oslo_vmware.api [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240534, 'name': PowerOnVM_Task, 'duration_secs': 1.517942} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.022848] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.023041] env[61898]: INFO nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Took 8.57 seconds to spawn the instance on the hypervisor. [ 773.023223] env[61898]: DEBUG nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 773.024126] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51b795a-2588-44af-8b36-e025550a1199 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.043370] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240537, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.236650] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.422463] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance d6c96dce-13ae-411a-b52a-fee484718a8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.481457] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.537791] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240537, 'name': CreateVM_Task, 'duration_secs': 1.28504} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.538331] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.539178] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.539566] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.540112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 773.540657] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed7dac0f-d8b0-4fbd-939e-bbc0b9c23a96 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.551335] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 773.551335] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52aed6e6-a1f3-55f3-897b-d3dbf0240ff7" [ 773.551335] env[61898]: _type = "Task" [ 773.551335] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.555207] env[61898]: INFO nova.compute.manager [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Took 27.73 seconds to build instance. [ 773.559759] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aed6e6-a1f3-55f3-897b-d3dbf0240ff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.736135] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 97%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.846417] env[61898]: DEBUG nova.network.neutron [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [{"id": "f605f253-f39d-4483-9ab2-634f7db2382b", "address": "fa:16:3e:b9:b2:a9", "network": {"id": "548bba5e-89e4-489d-92d7-d41c4072cb20", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1323681802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf605f253-f3", "ovs_interfaceid": "f605f253-f39d-4483-9ab2-634f7db2382b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "address": "fa:16:3e:08:8d:c4", "network": {"id": "48663ea5-5737-44ff-baaf-9384561502cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-437424685", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tape15583c8-0e", "ovs_interfaceid": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.926907] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance b709df92-bf56-40ed-ba48-a8fa19be8b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.062631] env[61898]: DEBUG oslo_concurrency.lockutils [None req-647ad66d-72da-4e7a-b78d-9c886b1ba0ad tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.949s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 774.062631] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52aed6e6-a1f3-55f3-897b-d3dbf0240ff7, 'name': SearchDatastore_Task, 'duration_secs': 0.042902} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.063765] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.064031] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.064293] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.064448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.064626] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 
tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.065092] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e6c1dd2-70ac-4bb8-b01d-4321c8a52f3a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.081746] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.081941] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.082678] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc403a92-a9f8-4ac6-8985-ae30b93f418d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.088390] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 774.088390] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5295699f-bb0e-5ff1-452e-6837833d5fd6" [ 774.088390] env[61898]: _type = "Task" [ 774.088390] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.095871] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5295699f-bb0e-5ff1-452e-6837833d5fd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.237908] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 97%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.270311] env[61898]: DEBUG nova.compute.manager [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-changed-e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 774.270420] env[61898]: DEBUG nova.compute.manager [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Refreshing instance network info cache due to event network-changed-e15583c8-0ee4-4cbf-bf23-b74d594430fb. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 774.270644] env[61898]: DEBUG oslo_concurrency.lockutils [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] Acquiring lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.349012] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Releasing lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.349490] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance network_info: |[{"id": "f605f253-f39d-4483-9ab2-634f7db2382b", "address": "fa:16:3e:b9:b2:a9", "network": {"id": "548bba5e-89e4-489d-92d7-d41c4072cb20", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1323681802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf605f253-f3", "ovs_interfaceid": "f605f253-f39d-4483-9ab2-634f7db2382b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "address": "fa:16:3e:08:8d:c4", "network": {"id": "48663ea5-5737-44ff-baaf-9384561502cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-437424685", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15583c8-0e", "ovs_interfaceid": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 774.349805] env[61898]: DEBUG oslo_concurrency.lockutils 
[req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] Acquired lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.349984] env[61898]: DEBUG nova.network.neutron [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Refreshing network info cache for port e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.351552] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:b2:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f605f253-f39d-4483-9ab2-634f7db2382b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:8d:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e15583c8-0ee4-4cbf-bf23-b74d594430fb', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 774.363511] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Creating folder: Project (294362a0c6b04039b589ae5eb0d341ee). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 774.364702] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a352376c-7521-47ca-a87a-7c0878c24965 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.375677] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Created folder: Project (294362a0c6b04039b589ae5eb0d341ee) in parent group-v267550. [ 774.375937] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Creating folder: Instances. Parent ref: group-v267608. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 774.376248] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7c0e45d-ae91-4ef9-956e-fb4f00cd4219 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.385366] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Created folder: Instances in parent group-v267608. 
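Nearly all of the lock traffic in this section (the per-instance "<uuid>-events" locks, the "refresh_cache-<uuid>" locks and the "compute_resources" claim lock) is emitted by oslo.concurrency's lockutils, which logs each acquire and release at DEBUG, including wait and hold times for the decorator form. A minimal sketch of the two forms that produce these lines, using placeholder lock names, follows.

# Hedged sketch of the oslo.concurrency primitives behind the
# "Acquiring lock ..." / "acquired ... waited 0.000s" / "released ... held 0.000s"
# entries in this log. Lock names and the function below are placeholders.
from oslo_concurrency import lockutils

# Context-manager form, as used for the "refresh_cache-<uuid>" locks.
with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
    pass  # e.g. rebuild the instance network-info cache while holding the lock

# Decorator form, as used to serialize work under the "compute_resources" lock.
@lockutils.synchronized('compute_resources')
def claim_resources():
    # placeholder for resource-tracker style work done under the lock
    return None

claim_resources()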
[ 774.385598] env[61898]: DEBUG oslo.service.loopingcall [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 774.385778] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 774.385975] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b48af938-7235-466c-9af8-2bc31da90826 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.409536] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 774.409536] env[61898]: value = "task-1240540" [ 774.409536] env[61898]: _type = "Task" [ 774.409536] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.419764] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240540, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.429678] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4db53fdf-7107-43c5-a57c-65d54b807909 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.566239] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 774.600635] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5295699f-bb0e-5ff1-452e-6837833d5fd6, 'name': SearchDatastore_Task, 'duration_secs': 0.053222} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.601497] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71345b3-69f5-4395-94c2-226898db31ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.609694] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 774.609694] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52309ce8-693c-012d-157e-238cfe9e0598" [ 774.609694] env[61898]: _type = "Task" [ 774.609694] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.617737] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52309ce8-693c-012d-157e-238cfe9e0598, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.739563] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task} progress is 98%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.920168] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240540, 'name': CreateVM_Task, 'duration_secs': 0.426974} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.920359] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.921122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.921341] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.921588] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.921830] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da09619d-87cc-4b35-99f8-5b57541b9511 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.926320] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 774.926320] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d57f85-2ad5-4dd1-b2a4-4586d2dc2fac" [ 774.926320] env[61898]: _type = "Task" [ 774.926320] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.934587] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 070bc0cc-ff77-48b8-bd08-f17fe69e25af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 774.935825] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d57f85-2ad5-4dd1-b2a4-4586d2dc2fac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.085020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.122089] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52309ce8-693c-012d-157e-238cfe9e0598, 'name': SearchDatastore_Task, 'duration_secs': 0.00966} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.122370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.122626] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.122933] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e30c064-4b90-4c72-adab-1ab55a0f9afb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.129933] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 775.129933] env[61898]: value = "task-1240541" [ 775.129933] env[61898]: _type = "Task" [ 775.129933] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.137658] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.192461] env[61898]: DEBUG nova.network.neutron [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updated VIF entry in instance network info cache for port e15583c8-0ee4-4cbf-bf23-b74d594430fb. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.192912] env[61898]: DEBUG nova.network.neutron [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [{"id": "f605f253-f39d-4483-9ab2-634f7db2382b", "address": "fa:16:3e:b9:b2:a9", "network": {"id": "548bba5e-89e4-489d-92d7-d41c4072cb20", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1323681802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf605f253-f3", "ovs_interfaceid": "f605f253-f39d-4483-9ab2-634f7db2382b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "address": "fa:16:3e:08:8d:c4", "network": {"id": "48663ea5-5737-44ff-baaf-9384561502cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-437424685", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.58", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape15583c8-0e", "ovs_interfaceid": "e15583c8-0ee4-4cbf-bf23-b74d594430fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.240469] env[61898]: 
DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240532, 'name': RelocateVM_Task, 'duration_secs': 4.561646} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.240779] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Volume attach. Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 775.240997] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267575', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'name': 'volume-5e4f66be-193a-428e-ae80-03e6b55967d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '466cbf07-e945-48d4-a103-5a3ea2b7adf6', 'attached_at': '', 'detached_at': '', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'serial': '5e4f66be-193a-428e-ae80-03e6b55967d5'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 775.241750] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cc2058-c819-4ce3-9973-0e2f88ed79d0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.256936] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d774a7ed-fce5-41fb-95d6-4a91f6c45229 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.278825] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] volume-5e4f66be-193a-428e-ae80-03e6b55967d5/volume-5e4f66be-193a-428e-ae80-03e6b55967d5.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.279134] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399ec1aa-dc47-4eb6-9f03-64ffe906c69e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.299703] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 775.299703] env[61898]: value = "task-1240542" [ 775.299703] env[61898]: _type = "Task" [ 775.299703] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.308395] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240542, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.436606] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d57f85-2ad5-4dd1-b2a4-4586d2dc2fac, 'name': SearchDatastore_Task, 'duration_secs': 0.035202} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.436986] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.437235] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 775.437462] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.437606] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.437776] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 775.438498] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 626caecc-6389-4064-aafd-9968cee262ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.439620] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-741d7335-21e5-422f-877a-782f3a40820c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.447169] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.447339] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 775.448016] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28731f46-6af8-4b47-a2c5-a18b84227b55 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.453010] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 775.453010] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529697fb-4d2d-96dc-533e-31bcde0e4d21" [ 775.453010] env[61898]: _type = "Task" [ 775.453010] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.460636] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529697fb-4d2d-96dc-533e-31bcde0e4d21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.644195] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240541, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.696318] env[61898]: DEBUG oslo_concurrency.lockutils [req-aae2d31a-e24a-4d01-ba03-8034b9da5c54 req-d27c916a-ba2e-47d8-988a-876b791a3327 service nova] Releasing lock "refresh_cache-5b51a1a5-7d54-4063-b680-e8b8b39fc46a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.812079] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240542, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.921485] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.921754] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.942770] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 86367a82-239b-4f6e-b306-d9661eadf95e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 775.943071] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 775.943243] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 775.963139] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529697fb-4d2d-96dc-533e-31bcde0e4d21, 'name': SearchDatastore_Task, 'duration_secs': 0.009552} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.967307] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e7f3040-98d8-4cb1-a871-17b5ead6d978 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.973231] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 775.973231] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5206af2c-5120-b632-82bd-2c68cf8134a6" [ 775.973231] env[61898]: _type = "Task" [ 775.973231] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.980946] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5206af2c-5120-b632-82bd-2c68cf8134a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.141038] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741128} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.143580] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.143808] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.144243] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b68ec2e4-610f-41c9-a5d9-5cae6b9707eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.150424] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 776.150424] env[61898]: value = "task-1240543" [ 776.150424] env[61898]: _type = "Task" [ 776.150424] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.159715] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.258318] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a972e239-04d7-48e8-80d3-f404df8786b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.266191] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c78aff5-7d04-4070-829a-5f3e27d1d276 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.298165] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b3eef0-a16a-4f31-bf72-125a3afdccf9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.313173] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfddf9c-cdf0-4a98-bd18-f4bf77e7480f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.317588] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240542, 'name': ReconfigVM_Task, 'duration_secs': 0.789741} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.317940] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Reconfigured VM instance instance-0000003b to attach disk [datastore1] volume-5e4f66be-193a-428e-ae80-03e6b55967d5/volume-5e4f66be-193a-428e-ae80-03e6b55967d5.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 776.324218] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a97e0d39-c991-465d-81de-d0288c84f048 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.343296] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.354049] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 776.354049] env[61898]: value = "task-1240544" [ 776.354049] env[61898]: _type = "Task" [ 776.354049] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.371738] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240544, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.488021] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5206af2c-5120-b632-82bd-2c68cf8134a6, 'name': SearchDatastore_Task, 'duration_secs': 0.022908} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.488140] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.488308] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 5b51a1a5-7d54-4063-b680-e8b8b39fc46a/5b51a1a5-7d54-4063-b680-e8b8b39fc46a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.488578] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e2adc9a-9926-46bb-a5a8-ad3f73347cab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.495194] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 776.495194] env[61898]: value = "task-1240545" [ 776.495194] env[61898]: _type = "Task" [ 776.495194] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.503013] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.660035] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175379} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.660405] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 776.661168] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64d09b-189a-40f2-8343-035863f4d0e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.680972] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.681265] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9eaab928-d330-4238-82a1-1a3c158d636a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.701087] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 776.701087] env[61898]: value = "task-1240546" [ 776.701087] env[61898]: _type = "Task" [ 776.701087] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.709051] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240546, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.866305] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.872412] env[61898]: ERROR nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [req-5e10fc3f-561d-4e71-95b6-a4ebed754543] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e10fc3f-561d-4e71-95b6-a4ebed754543"}]} [ 776.887784] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 776.901405] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 776.901557] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 776.912054] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 776.929444] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 777.005208] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.210996] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240546, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.302922] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11b19c7-a1ed-4406-9fac-9447e31484bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.310260] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff405ad-13e4-45f6-aec4-72744733c7d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.344668] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498e5386-0674-4cf2-a53f-f3060751b7e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.356141] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82c53c9-8f32-4776-b60c-f20900ea83dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.370249] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.377081] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.507904] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240545, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.712541] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240546, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.866955] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240544, 'name': ReconfigVM_Task, 'duration_secs': 1.101829} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.867267] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267575', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'name': 'volume-5e4f66be-193a-428e-ae80-03e6b55967d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '466cbf07-e945-48d4-a103-5a3ea2b7adf6', 'attached_at': '', 'detached_at': '', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'serial': '5e4f66be-193a-428e-ae80-03e6b55967d5'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 777.867792] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29bbe312-35ff-4779-9577-e8202bc58fde {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.876415] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 777.876415] env[61898]: value = "task-1240547" [ 777.876415] env[61898]: _type = "Task" [ 777.876415] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.884585] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240547, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.902503] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 777.902754] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 81 to 82 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 777.902905] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 778.008026] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240545, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.244281} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.008359] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 5b51a1a5-7d54-4063-b680-e8b8b39fc46a/5b51a1a5-7d54-4063-b680-e8b8b39fc46a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.008588] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.008846] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6cdbcfd7-fca7-4668-8adf-535539f85946 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.014834] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 778.014834] env[61898]: value = "task-1240548" [ 778.014834] env[61898]: _type = "Task" [ 778.014834] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.022283] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.212834] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240546, 'name': ReconfigVM_Task, 'duration_secs': 1.302097} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.213155] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Reconfigured VM instance instance-0000003d to attach disk [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.213774] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-afed1ecc-8562-4195-a410-ff4c3ec25c95 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.220264] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 778.220264] env[61898]: value = "task-1240549" [ 778.220264] env[61898]: _type = "Task" [ 778.220264] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.228016] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240549, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.386270] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240547, 'name': Rename_Task, 'duration_secs': 0.163206} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.386570] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.386815] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee2dd1d6-c875-45df-b66c-f3d0d755e87f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.393256] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 778.393256] env[61898]: value = "task-1240550" [ 778.393256] env[61898]: _type = "Task" [ 778.393256] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.401051] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240550, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.408665] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 778.409922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.554s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.409922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.317s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.409922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.411541] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.114s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.413125] env[61898]: INFO nova.compute.claims [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.415771] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 778.415918] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Cleaning up deleted instances {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 778.439946] env[61898]: INFO nova.scheduler.client.report [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted allocations for instance 9e6a3749-1974-4818-9cc6-76367d41b7e5 [ 778.524911] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.259408} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.524911] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.525540] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e0d1b0-d971-497c-8e96-e894713df4d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.550036] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 5b51a1a5-7d54-4063-b680-e8b8b39fc46a/5b51a1a5-7d54-4063-b680-e8b8b39fc46a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.550375] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e96834bd-c284-41ef-aecb-9f1565227101 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.570205] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 778.570205] env[61898]: value = "task-1240551" [ 778.570205] env[61898]: _type = "Task" [ 778.570205] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.578378] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.729974] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240549, 'name': Rename_Task, 'duration_secs': 0.151429} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.731732] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.731732] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3cd7c57c-d7bb-4b7b-a91b-de8156178f6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.737412] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 778.737412] env[61898]: value = "task-1240552" [ 778.737412] env[61898]: _type = "Task" [ 778.737412] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.745502] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.903018] env[61898]: DEBUG oslo_vmware.api [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240550, 'name': PowerOnVM_Task, 'duration_secs': 0.458883} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.903445] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 778.903760] env[61898]: INFO nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Took 10.20 seconds to spawn the instance on the hypervisor. 
[ 778.904061] env[61898]: DEBUG nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 778.905204] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d54112e-6c49-4239-a51b-4285212bf8cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.923283] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] There are 6 instances to clean {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 778.923631] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 81fd9ccc-a267-498d-93d4-8adf894ee8d8] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 778.953329] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f7294778-a965-4829-8288-b560dc345c9a tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "9e6a3749-1974-4818-9cc6-76367d41b7e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.723s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.082009] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240551, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.249985] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240552, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.426028] env[61898]: INFO nova.compute.manager [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Took 31.79 seconds to build instance. [ 779.427059] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 23a0d825-3132-44d5-8b73-a06a0c0e7b1a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 779.583621] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240551, 'name': ReconfigVM_Task, 'duration_secs': 0.657749} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.583937] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 5b51a1a5-7d54-4063-b680-e8b8b39fc46a/5b51a1a5-7d54-4063-b680-e8b8b39fc46a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 779.585304] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-434512df-7edf-400e-9a67-bae04fe866bb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.593758] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 779.593758] env[61898]: value = "task-1240553" [ 779.593758] env[61898]: _type = "Task" [ 779.593758] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.604986] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240553, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.744048] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006b8388-ddb4-426d-a8c5-36c41f18224f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.752073] env[61898]: DEBUG oslo_vmware.api [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240552, 'name': PowerOnVM_Task, 'duration_secs': 0.688255} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.754558] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.755429] env[61898]: INFO nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Took 7.87 seconds to spawn the instance on the hypervisor. 
[ 779.755429] env[61898]: DEBUG nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 779.755893] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1972e5f-a06b-4730-9a24-150c160c7752 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.759038] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e7f7f1-48bf-4148-a412-478d3a0e9ece {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.792614] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839d79ab-be40-44da-8989-243471b622d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.801042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cd034a-c3fe-46ad-80f4-ebe84282c320 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.815288] env[61898]: DEBUG nova.compute.provider_tree [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.929072] env[61898]: DEBUG oslo_concurrency.lockutils [None req-af8b1fe3-dc48-483c-80bf-89e03b4431c6 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.448s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.929475] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 7ef91986-fb46-478b-85a5-05d597790ad9] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 780.105734] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240553, 'name': Rename_Task, 'duration_secs': 0.145047} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.106046] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.106298] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49c887b5-79ca-4995-a4b3-0cd8bc136822 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.111714] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 780.111714] env[61898]: value = "task-1240554" [ 780.111714] env[61898]: _type = "Task" [ 780.111714] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.119717] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240554, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.301589] env[61898]: INFO nova.compute.manager [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Took 28.40 seconds to build instance. [ 780.318882] env[61898]: DEBUG nova.scheduler.client.report [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 780.433216] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 0dfabd80-a385-4124-af33-083559819d7a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 780.435271] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 780.622376] env[61898]: DEBUG oslo_vmware.api [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240554, 'name': PowerOnVM_Task, 'duration_secs': 0.470184} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.622633] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.622864] env[61898]: INFO nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Took 11.24 seconds to spawn the instance on the hypervisor. [ 780.623062] env[61898]: DEBUG nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 780.623849] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b46ebe0-0b7e-4d8c-a5e4-937032786600 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.807998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6ab9f8df-e9e5-4b2d-b686-337006d86cd1 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.453s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.807998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "29eadea9-fa85-4f51-97d0-a941e1658094" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.807998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.808691] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.808691] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.808783] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.812559] env[61898]: INFO nova.compute.manager [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Terminating instance [ 780.824695] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.825269] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 780.828625] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.426s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.830866] env[61898]: INFO nova.compute.claims [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 780.847740] env[61898]: INFO nova.compute.manager [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Rebuilding instance [ 780.898252] env[61898]: DEBUG nova.compute.manager [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 780.899150] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f71e328-7be1-47a5-8d58-58bb02ae5504 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.944083] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 41ac9f9b-5cd3-4302-86ac-8ef7cae603b6] Instance 
has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 780.968108] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.000207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "a0580308-d25b-47cb-9c1c-adb763be7925" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.000207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.000207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.000207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.000207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.002270] env[61898]: INFO nova.compute.manager [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Terminating instance [ 781.143112] env[61898]: INFO nova.compute.manager [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Took 31.49 seconds to build instance. 
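Many of the surrounding lines are lock bookkeeping: a per-instance lock is acquired ("waited 0.000s"), the build or terminate work runs, and the lock is released ("held 94.215s"). The following is a small stdlib-only illustration of that acquire/wait/held accounting built on a plain threading.Lock; the `timed_lock` helper and its `_locks` registry are hypothetical names and are not the oslo_concurrency.lockutils implementation.

```python
# Illustrative reimplementation of the lock timing bookkeeping logged above
# ("acquired ... waited Ns", "released ... held Ns"), using only the stdlib.
# The timed_lock() helper and the _locks registry are hypothetical names.
import contextlib
import threading
import time

_locks = {}                        # lock name -> threading.Lock
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t_wait = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t_wait
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t_held = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t_held
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("466cbf07-e945-48d4-a103-5a3ea2b7adf6",
                    "do_terminate_instance"):
        time.sleep(0.2)            # stands in for the actual terminate work
```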
[ 781.317251] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 781.321030] env[61898]: DEBUG nova.compute.manager [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 781.321030] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.321030] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9936fd-5f6e-4f34-b8ae-2ad5269e508e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.330195] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.330493] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3575e9d9-6212-47b7-884b-fc2e3a382b26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.335179] env[61898]: DEBUG nova.compute.utils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.341288] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 781.341288] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.342730] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 781.342730] env[61898]: value = "task-1240555" [ 781.342730] env[61898]: _type = "Task" [ 781.342730] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.355992] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.369263] env[61898]: DEBUG nova.compute.manager [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Received event network-changed-f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 781.373023] env[61898]: DEBUG nova.compute.manager [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Refreshing instance network info cache due to event network-changed-f036aa10-aacf-4943-b51b-28b2693d3448. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 781.373023] env[61898]: DEBUG oslo_concurrency.lockutils [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] Acquiring lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.373023] env[61898]: DEBUG oslo_concurrency.lockutils [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] Acquired lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.373023] env[61898]: DEBUG nova.network.neutron [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Refreshing network info cache for port f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.429964] env[61898]: DEBUG nova.policy [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a910d0cdf3cd4b17af818abd25a38b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ce0562f486e44cc877c1cc31525a13a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.452791] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 8a5c9847-fc0d-41f7-87b8-d7ff44073ea9] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 781.504731] env[61898]: DEBUG nova.compute.manager [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Start destroying the instance on the 
hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 781.505041] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.506125] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec013f71-6a48-4ae0-8d24-11beb90b8f0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.518044] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.518044] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0859583f-8fe1-4956-b5fd-31791bafabf8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.528529] env[61898]: DEBUG oslo_vmware.api [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 781.528529] env[61898]: value = "task-1240556" [ 781.528529] env[61898]: _type = "Task" [ 781.528529] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.537073] env[61898]: DEBUG oslo_vmware.api [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.644470] env[61898]: DEBUG oslo_concurrency.lockutils [None req-204911d5-e785-4d40-80dc-c1832f793f52 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.215s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.804268] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Successfully created port: 238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.845092] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 781.857462] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.868048] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240555, 'name': PowerOffVM_Task, 'duration_secs': 0.34822} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.868915] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.870344] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 781.870344] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb2a0538-f86b-4dac-9605-e894ee50893c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.916708] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 781.917309] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50e71e8b-b47e-4674-8101-8ff3c6a744bb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.926865] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 781.926865] env[61898]: value = "task-1240558" [ 781.926865] env[61898]: _type = "Task" [ 781.926865] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.931786] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 781.931972] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 781.932222] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleting the datastore file [datastore2] 29eadea9-fa85-4f51-97d0-a941e1658094 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.932927] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-654a3b45-4059-429b-a45c-adc99e3d5c9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.938320] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.946108] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 781.946108] env[61898]: value = "task-1240559" [ 781.946108] env[61898]: _type = "Task" [ 781.946108] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.955287] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240559, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.958448] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.958666] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Cleaning up deleted instances with incomplete migration {{(pid=61898) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 782.039888] env[61898]: DEBUG oslo_vmware.api [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240556, 'name': PowerOffVM_Task, 'duration_secs': 0.168475} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.040279] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 782.040731] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 782.044261] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28065374-1a3b-454e-8408-4efe4fe9206c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.109220] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 782.109813] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 782.110020] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleting the datastore file [datastore2] a0580308-d25b-47cb-9c1c-adb763be7925 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.111197] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a831a418-4d96-4653-946d-9bbe36337510 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.123638] env[61898]: DEBUG oslo_vmware.api [None 
req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for the task: (returnval){ [ 782.123638] env[61898]: value = "task-1240561" [ 782.123638] env[61898]: _type = "Task" [ 782.123638] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.145501] env[61898]: DEBUG oslo_vmware.api [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.147187] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 782.160187] env[61898]: DEBUG nova.network.neutron [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updated VIF entry in instance network info cache for port f036aa10-aacf-4943-b51b-28b2693d3448. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 782.160551] env[61898]: DEBUG nova.network.neutron [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updating instance_info_cache with network_info: [{"id": "f036aa10-aacf-4943-b51b-28b2693d3448", "address": "fa:16:3e:18:67:05", "network": {"id": "eb434da2-9ae2-4942-bfc8-9fcfafa028e3", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2040918802-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.240", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8d19464641434cc7a06ca4eed2d5bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db00ec2e-3155-46b6-8170-082f7d86dbe7", "external-id": "nsx-vlan-transportzone-332", "segmentation_id": 332, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf036aa10-aa", "ovs_interfaceid": "f036aa10-aacf-4943-b51b-28b2693d3448", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.246264] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ced40f-ee9b-48c2-8496-442dc4c900aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.254911] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b5059265-9b45-446b-af4b-d7c0a3da1724 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.290251] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2a240c-f821-48c8-9b0f-077ea0d13677 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.298634] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b9bcbf-5266-4a51-9e72-c10888a65d9c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.316857] env[61898]: DEBUG nova.compute.provider_tree [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.437420] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240558, 'name': PowerOffVM_Task, 'duration_secs': 0.194963} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.437700] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 782.437927] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 782.438776] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7235b389-79c5-4e89-83f1-d79bac3bcd62 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.445657] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 782.445905] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1efb07b9-67e5-4560-99ef-b9e818ae1f5c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.455257] env[61898]: DEBUG oslo_vmware.api [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241423} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.455482] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.455662] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 782.455899] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 782.455984] env[61898]: INFO nova.compute.manager [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Took 1.14 seconds to destroy the instance on the hypervisor. [ 782.456228] env[61898]: DEBUG oslo.service.loopingcall [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.456411] env[61898]: DEBUG nova.compute.manager [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 782.456518] env[61898]: DEBUG nova.network.neutron [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.461074] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 782.472039] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 782.472039] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 782.472039] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Deleting the datastore file [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 782.472039] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cb334f6-dbe3-4112-8fd3-2928c4046856 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.477583] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 782.477583] env[61898]: value = "task-1240563" [ 782.477583] env[61898]: _type = "Task" [ 782.477583] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.485558] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.638479] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.638790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.639100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.639415] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.639698] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.642079] env[61898]: DEBUG oslo_vmware.api [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Task: {'id': task-1240561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429399} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.642463] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.642688] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 782.642949] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 782.643320] env[61898]: INFO nova.compute.manager [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Took 1.14 seconds to destroy the instance on the hypervisor. [ 782.643543] env[61898]: DEBUG oslo.service.loopingcall [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.644297] env[61898]: INFO nova.compute.manager [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Terminating instance [ 782.646292] env[61898]: DEBUG nova.compute.manager [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 782.646383] env[61898]: DEBUG nova.network.neutron [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.668644] env[61898]: DEBUG oslo_concurrency.lockutils [req-e2d10c37-f77a-4f9d-b8e2-7e1e70705207 req-04197842-f925-4ebd-a81a-021a236dce56 service nova] Releasing lock "refresh_cache-466cbf07-e945-48d4-a103-5a3ea2b7adf6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.676483] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.796948] env[61898]: DEBUG nova.compute.manager [req-c22a0d7e-9bb0-4b3a-8c9a-00bdc1c6ffa4 req-f84ea50b-8d83-4c13-a149-38687c5be8f9 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Received event network-vif-deleted-3f1074c7-8a0c-43aa-876a-aeccfb82877f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 782.797126] env[61898]: INFO nova.compute.manager [req-c22a0d7e-9bb0-4b3a-8c9a-00bdc1c6ffa4 req-f84ea50b-8d83-4c13-a149-38687c5be8f9 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Neutron deleted interface 3f1074c7-8a0c-43aa-876a-aeccfb82877f; detaching it from the instance and deleting it from the info cache [ 782.797313] env[61898]: DEBUG nova.network.neutron [req-c22a0d7e-9bb0-4b3a-8c9a-00bdc1c6ffa4 req-f84ea50b-8d83-4c13-a149-38687c5be8f9 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.820570] env[61898]: DEBUG nova.scheduler.client.report [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 782.863254] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 
b106ab9e-08d4-4d18-90e0-13a071c9efb1] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 782.907864] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.908050] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.908340] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.908551] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.909023] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.909023] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.909177] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.909224] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 782.909404] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.909522] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.909691] env[61898]: DEBUG nova.virt.hardware [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.910671] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2be00ed-09ea-448d-b72f-2a7dfd2715b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.919067] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676baf00-b18a-4ab4-9c98-fdb12866ee7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.987339] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401341} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.987617] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 782.987805] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 782.987974] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.151429] env[61898]: DEBUG nova.compute.manager [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 783.151715] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.153152] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-590ffe0b-aec1-41c6-b8ae-63695d7d45be {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.161043] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.161288] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fe6d360-59db-4828-9003-b7b1805b9bff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.167200] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 783.167200] env[61898]: value = "task-1240564" [ 783.167200] env[61898]: _type = "Task" [ 783.167200] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.175377] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240564, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.264128] env[61898]: DEBUG nova.network.neutron [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.301396] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-078ef45e-2adc-49b6-9ac4-49bd4ee5949a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.311800] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c61bea8-f11f-4372-8f71-5c2e4f2c02ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.325727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.326278] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 783.329910] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.108s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.331364] env[61898]: INFO nova.compute.claims [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.342740] env[61898]: DEBUG nova.compute.manager [req-c22a0d7e-9bb0-4b3a-8c9a-00bdc1c6ffa4 req-f84ea50b-8d83-4c13-a149-38687c5be8f9 service nova] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Detach interface failed, port_id=3f1074c7-8a0c-43aa-876a-aeccfb82877f, reason: Instance 29eadea9-fa85-4f51-97d0-a941e1658094 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 783.398760] env[61898]: DEBUG nova.network.neutron [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.633574] env[61898]: DEBUG nova.compute.manager [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Received event network-vif-plugged-238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 783.633799] env[61898]: DEBUG oslo_concurrency.lockutils [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] Acquiring lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.633989] env[61898]: DEBUG oslo_concurrency.lockutils [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 783.634169] env[61898]: DEBUG oslo_concurrency.lockutils [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 783.634335] env[61898]: DEBUG nova.compute.manager [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] No waiting events found dispatching network-vif-plugged-238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.634495] env[61898]: WARNING nova.compute.manager [req-d34bba63-3a07-452c-96e8-4a5642792814 req-4644ce17-40ec-4ac2-b9eb-4bd425546447 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Received unexpected event network-vif-plugged-238d786d-b9f2-4cbb-86ed-2508303aa88f for instance with vm_state building and task_state spawning. [ 783.679020] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240564, 'name': PowerOffVM_Task, 'duration_secs': 0.162634} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.679297] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 783.679464] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 783.679718] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbf71d08-214c-4632-8ff5-f84fd4442299 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.751277] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Successfully updated port: 238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.767374] env[61898]: INFO nova.compute.manager [-] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Took 1.31 seconds to deallocate network for instance. [ 783.783794] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 783.783794] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 783.783794] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Deleting the datastore file [datastore1] 5b51a1a5-7d54-4063-b680-e8b8b39fc46a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 783.783975] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2d07311-7ffe-4564-b19f-972b1209e2d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.792396] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for the task: (returnval){ [ 783.792396] env[61898]: value = "task-1240566" [ 783.792396] env[61898]: _type = "Task" [ 783.792396] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.800597] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240566, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.832208] env[61898]: DEBUG nova.compute.utils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 783.833601] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 783.833798] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 783.886542] env[61898]: DEBUG nova.policy [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 783.901216] env[61898]: INFO nova.compute.manager [-] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Took 1.25 seconds to deallocate network for instance. 
[ 784.025310] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 784.025601] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 784.025760] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 784.025943] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 784.026296] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 784.026555] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 784.026824] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 784.027044] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 784.029682] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Got 1 possible topologies 
{{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 784.029682] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 784.029682] env[61898]: DEBUG nova.virt.hardware [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 784.029682] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f378b190-b944-43e8-86ee-d79036280f72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.036753] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4297c2a-3f7f-431b-ba73-cf135c9af636 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.052868] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance VIF info [] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.058723] env[61898]: DEBUG oslo.service.loopingcall [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.059037] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.059286] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6345de85-924c-4abe-9277-cd8af438e5a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.076436] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.076436] env[61898]: value = "task-1240567" [ 784.076436] env[61898]: _type = "Task" [ 784.076436] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.083879] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240567, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.238104] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Successfully created port: 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.254965] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 784.255167] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.255299] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.274206] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.304138] env[61898]: DEBUG oslo_vmware.api [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Task: {'id': task-1240566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135154} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.304138] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.304138] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.304138] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.304138] env[61898]: INFO nova.compute.manager [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 784.304138] env[61898]: DEBUG oslo.service.loopingcall [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 784.304138] env[61898]: DEBUG nova.compute.manager [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 784.304138] env[61898]: DEBUG nova.network.neutron [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.336937] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 784.407793] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.589717] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240567, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.683281] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5bc22b-55e7-4183-88a0-aa40ec554a7b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.690894] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcfb04a-0348-4f65-a7bf-05f740a4dbe4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.724392] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc54072c-dafa-44e6-9bcf-8bc1b969342c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.733351] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c504142-9c2d-46fa-b840-19f8cb2baa6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.746853] env[61898]: DEBUG nova.compute.provider_tree [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.812119] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.839928] env[61898]: DEBUG nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Received event network-vif-deleted-631e2851-f905-45a8-8e8a-f51849e4bd16 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 784.840331] env[61898]: DEBUG nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-vif-deleted-e15583c8-0ee4-4cbf-bf23-b74d594430fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 784.840331] env[61898]: INFO nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Neutron deleted interface e15583c8-0ee4-4cbf-bf23-b74d594430fb; detaching it from the instance and deleting it from the info cache [ 784.840581] env[61898]: DEBUG nova.network.neutron [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [{"id": "f605f253-f39d-4483-9ab2-634f7db2382b", "address": "fa:16:3e:b9:b2:a9", "network": {"id": "548bba5e-89e4-489d-92d7-d41c4072cb20", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1323681802", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "294362a0c6b04039b589ae5eb0d341ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf605f253-f3", "ovs_interfaceid": "f605f253-f39d-4483-9ab2-634f7db2382b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.948183] env[61898]: DEBUG nova.network.neutron [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Updating instance_info_cache with network_info: [{"id": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "address": "fa:16:3e:74:11:13", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d786d-b9", "ovs_interfaceid": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.086833] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240567, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.281492] env[61898]: DEBUG nova.scheduler.client.report [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 785.281842] env[61898]: DEBUG nova.compute.provider_tree [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 82 to 83 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 785.282084] env[61898]: DEBUG nova.compute.provider_tree [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 785.304571] env[61898]: DEBUG nova.network.neutron [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.343152] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c18127f-941a-49b8-b3d6-b5341180256f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.352538] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ab7f12-814a-4187-a7f8-898e259f6b60 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.364774] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 785.383594] env[61898]: DEBUG nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Detach interface failed, port_id=e15583c8-0ee4-4cbf-bf23-b74d594430fb, reason: Instance 5b51a1a5-7d54-4063-b680-e8b8b39fc46a could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 785.383834] env[61898]: DEBUG nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Received event network-vif-deleted-f605f253-f39d-4483-9ab2-634f7db2382b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 785.384014] env[61898]: INFO nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Neutron deleted interface f605f253-f39d-4483-9ab2-634f7db2382b; detaching it from the instance and deleting it from the info cache [ 785.384195] env[61898]: DEBUG nova.network.neutron [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.396302] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 785.396558] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 785.396717] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 
{{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.396898] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 785.397395] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.397619] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 785.397835] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 785.397998] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 785.398195] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 785.398389] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 785.398563] env[61898]: DEBUG nova.virt.hardware [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 785.400341] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c5e8d3-e0d3-4ade-ad88-127180a28c4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.408991] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4c398f-dba3-4366-b915-2cbd4b0ce667 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.450383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 
tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.450651] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance network_info: |[{"id": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "address": "fa:16:3e:74:11:13", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d786d-b9", "ovs_interfaceid": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 785.451029] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:11:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '238d786d-b9f2-4cbb-86ed-2508303aa88f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.458353] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating folder: Project (3ce0562f486e44cc877c1cc31525a13a). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.458576] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-305e81c0-e08f-4342-9e50-3368ee9f3cf0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.469148] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created folder: Project (3ce0562f486e44cc877c1cc31525a13a) in parent group-v267550. 
[ 785.469322] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating folder: Instances. Parent ref: group-v267612. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.469519] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e46159ce-1690-4aad-89ea-eb885cb10c67 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.477632] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created folder: Instances in parent group-v267612. [ 785.477847] env[61898]: DEBUG oslo.service.loopingcall [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.478044] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.478255] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bde27c3f-edb0-4af9-b598-5dd0e731add2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.497662] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.497662] env[61898]: value = "task-1240570" [ 785.497662] env[61898]: _type = "Task" [ 785.497662] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.504617] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240570, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.588031] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240567, 'name': CreateVM_Task, 'duration_secs': 1.256637} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.588290] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.588814] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.589033] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.589418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 785.589712] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba133cc8-ab3f-49e6-bfba-b79defef4179 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.595091] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 785.595091] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52433a3b-a36a-b36d-2cf2-96a8f16f2086" [ 785.595091] env[61898]: _type = "Task" [ 785.595091] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.604306] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52433a3b-a36a-b36d-2cf2-96a8f16f2086, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.788141] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.791883] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 785.791883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.525s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.792615] env[61898]: INFO nova.compute.claims [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.810090] env[61898]: INFO nova.compute.manager [-] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Took 1.51 seconds to deallocate network for instance. [ 785.823039] env[61898]: DEBUG nova.compute.manager [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Received event network-changed-238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 785.823395] env[61898]: DEBUG nova.compute.manager [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Refreshing instance network info cache due to event network-changed-238d786d-b9f2-4cbb-86ed-2508303aa88f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 785.823968] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] Acquiring lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.823968] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] Acquired lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.823968] env[61898]: DEBUG nova.network.neutron [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Refreshing network info cache for port 238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.889290] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb037e4d-4952-4818-83d3-cfa57ff533cc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.898469] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcfd7f5-364a-4292-b5bb-e5cd2d9067bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.933182] env[61898]: DEBUG nova.compute.manager [req-ade12aba-b30b-4a40-9f25-99edd0859bc2 req-361a94bc-9db3-4149-8c8b-f51a84773e66 service nova] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Detach interface failed, port_id=f605f253-f39d-4483-9ab2-634f7db2382b, 
reason: Instance 5b51a1a5-7d54-4063-b680-e8b8b39fc46a could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 786.008274] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240570, 'name': CreateVM_Task, 'duration_secs': 0.281437} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.008519] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.009736] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.089992] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Successfully updated port: 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 786.105639] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52433a3b-a36a-b36d-2cf2-96a8f16f2086, 'name': SearchDatastore_Task, 'duration_secs': 0.01085} completed successfully. 
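
The CreateVM_Task and SearchDatastore_Task entries above (and throughout this log) follow one pattern: a vCenter task is submitted, oslo.vmware polls it (_poll_task, "progress is 0%"), and the caller blocks in wait_for_task until it reaches a terminal state. A minimal stand-alone sketch of that polling loop, assuming a hypothetical get_task_info() accessor in place of the real vSphere PropertyCollector calls:

    import time

    class FakeTaskInfo:
        """Hypothetical stand-in for a vSphere TaskInfo object."""
        def __init__(self):
            self.state = "running"
            self.progress = 0
            self.result = None

    def get_task_info(task):
        # Hypothetical accessor: a real driver would read the task's
        # 'info' property via the PropertyCollector (RetrievePropertiesEx).
        task.progress = min(task.progress + 50, 100)
        if task.progress >= 100:
            task.state = "success"
            task.result = "search results"
        return task

    def wait_for_task(task, poll_interval=0.5):
        """Block until the task reaches a terminal state, logging progress."""
        while True:
            info = get_task_info(task)
            print(f"Task progress is {info.progress}%.")
            if info.state == "success":
                print("Task completed successfully.")
                return info.result
            if info.state == "error":
                raise RuntimeError("task failed")
            time.sleep(poll_interval)

    if __name__ == "__main__":
        wait_for_task(FakeTaskInfo())
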
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.106610] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.106988] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.107291] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.107440] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.107651] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.107900] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.108226] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 786.108719] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dac739c-cf46-4dea-8bac-78638fd520fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.110782] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e53fe6d-2a73-4546-a424-8d4123972b7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.116569] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 
tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 786.116569] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520283e4-0917-7daa-6d23-a4f1dcc213df" [ 786.116569] env[61898]: _type = "Task" [ 786.116569] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.120668] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.120842] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.121856] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bbdeba9-2594-4138-8098-671c4724c38e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.128793] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520283e4-0917-7daa-6d23-a4f1dcc213df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.131675] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 786.131675] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5213fdad-bd70-4b57-71bb-d49841243ec3" [ 786.131675] env[61898]: _type = "Task" [ 786.131675] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.139021] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5213fdad-bd70-4b57-71bb-d49841243ec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.299632] env[61898]: DEBUG nova.compute.utils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 786.303696] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Allocating IP information in the background. 
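
The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" triplets above come from the named-lock wrapper timing how long each caller waited for and then held a lock such as "compute_resources" or the image-cache path. A simplified stand-alone sketch of that bookkeeping, using only the standard library rather than oslo_concurrency:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                       # lock name -> threading.Lock
    _locks_guard = threading.Lock()   # protects the registry itself

    @contextmanager
    def timed_lock(name, owner):
        """Acquire a named lock, reporting wait and hold times."""
        with _locks_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{owner}"')
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - acquired
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    if __name__ == "__main__":
        with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
            time.sleep(0.1)   # simulated critical section
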
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 786.303863] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.320145] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.346150] env[61898]: DEBUG nova.policy [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c909f4306477d8fc741ab3aac9d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e8b71885c83418fb13e216f804ffeeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 786.593211] env[61898]: DEBUG nova.network.neutron [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Updated VIF entry in instance network info cache for port 238d786d-b9f2-4cbb-86ed-2508303aa88f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.593211] env[61898]: DEBUG nova.network.neutron [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Updating instance_info_cache with network_info: [{"id": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "address": "fa:16:3e:74:11:13", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap238d786d-b9", "ovs_interfaceid": "238d786d-b9f2-4cbb-86ed-2508303aa88f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.593497] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.593831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.593831] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 786.629046] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520283e4-0917-7daa-6d23-a4f1dcc213df, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. 
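
The network-changed handling above boils down to: take the per-instance refresh_cache lock, re-query the changed port, and overwrite that VIF's entry in the cached instance_info_cache. A stand-alone sketch of that flow, where query_port is a hypothetical stand-in for the Neutron "show port" call:

    import threading

    _cache_lock = threading.Lock()
    _instance_nw_info = {}   # instance_uuid -> list of VIF dicts

    def query_port(port_id):
        # Hypothetical stand-in for the Neutron port lookup.
        return {"id": port_id, "address": "fa:16:3e:00:00:01", "active": True}

    def handle_network_changed(instance_uuid, port_id):
        """Refresh the cached network info for one port of one instance."""
        with _cache_lock:                      # "Acquired lock refresh_cache-<uuid>"
            vifs = _instance_nw_info.setdefault(instance_uuid, [])
            fresh = query_port(port_id)
            for i, vif in enumerate(vifs):
                if vif["id"] == port_id:
                    vifs[i] = fresh            # "Updated VIF entry in instance network info cache"
                    break
            else:
                vifs.append(fresh)

    if __name__ == "__main__":
        handle_network_changed("b106ab9e-08d4-4d18-90e0-13a071c9efb1",
                               "238d786d-b9f2-4cbb-86ed-2508303aa88f")
        print(_instance_nw_info)
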
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.629497] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.629731] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.629996] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.630907] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Successfully created port: 9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 786.641649] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5213fdad-bd70-4b57-71bb-d49841243ec3, 'name': SearchDatastore_Task, 'duration_secs': 0.00862} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.642435] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab3f6dda-b231-4949-8f13-295fbbc1013b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.647328] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 786.647328] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5224bcd4-e1d7-e4df-0455-ea14488cdbf6" [ 786.647328] env[61898]: _type = "Task" [ 786.647328] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.654904] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224bcd4-e1d7-e4df-0455-ea14488cdbf6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.804940] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 787.098958] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f4fa140-aa0a-44fd-aaa8-e2e39a2a12e8 req-c4a67fa9-cf06-4272-993f-9b77d5a137c2 service nova] Releasing lock "refresh_cache-b106ab9e-08d4-4d18-90e0-13a071c9efb1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.137381] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 787.162226] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224bcd4-e1d7-e4df-0455-ea14488cdbf6, 'name': SearchDatastore_Task, 'duration_secs': 0.014294} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.162487] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.162777] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.163236] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.163450] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.163672] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df284a23-2a37-4fa0-bc29-022d9d8c3d7a 
{{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.166319] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eea1322-696b-420b-b621-cc76f6b3f079 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.168694] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65836b0a-815c-4970-995c-75c1610a904f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.177542] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9588b1d4-5f9c-41e1-bde7-ef09466626f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.180892] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 787.180892] env[61898]: value = "task-1240571" [ 787.180892] env[61898]: _type = "Task" [ 787.180892] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.184146] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.184325] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.188198] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bac009da-cf78-42eb-8008-cdf3275aef8d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.217752] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda53e92-8c40-4518-9a78-157b0cc6fa19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.224308] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 787.224308] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520f1591-3e36-f947-5c83-178141a20130" [ 787.224308] env[61898]: _type = "Task" [ 787.224308] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.224529] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240571, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.232269] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cb5ea6-1dee-4ea1-b6ab-23b8637c9b88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.238989] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520f1591-3e36-f947-5c83-178141a20130, 'name': SearchDatastore_Task, 'duration_secs': 0.010108} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.240110] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f530d0ee-b363-435b-bd2b-bc1273bf7118 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.249828] env[61898]: DEBUG nova.compute.provider_tree [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.254306] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 787.254306] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228958e-ef39-5a1d-b143-11a177699b27" [ 787.254306] env[61898]: _type = "Task" [ 787.254306] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.264153] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5228958e-ef39-5a1d-b143-11a177699b27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.300055] env[61898]: DEBUG nova.network.neutron [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.696145] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493886} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.696417] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.696633] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.696881] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d94f17f6-5a0c-4afe-8a7b-6278f3de9b26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.703752] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 787.703752] env[61898]: value = "task-1240572" [ 787.703752] env[61898]: _type = "Task" [ 787.703752] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.713687] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.753115] env[61898]: DEBUG nova.scheduler.client.report [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 787.772826] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5228958e-ef39-5a1d-b143-11a177699b27, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.773029] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.775277] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.775277] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94136a38-d407-45a2-9f03-5b3df826ed8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.780819] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 787.780819] env[61898]: value = "task-1240573" [ 787.780819] env[61898]: _type = "Task" [ 787.780819] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.789542] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240573, 'name': CopyVirtualDisk_Task} progress is 0%. 
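
The disk preparation above is the fetch-if-missing spawn path: search the datastore image cache for the base VMDK, create the cache directory if needed, copy the cached disk into the instance folder, then extend the root disk to the flavor size. A simplified local-filesystem sketch of the same ordering, assuming plain files in place of datastore objects:

    import shutil
    from pathlib import Path

    CACHE_DIR = Path("devstack-image-cache_base")

    def ensure_cached_image(image_id, fetch):
        """Return the cached VMDK path, fetching it only if it is missing."""
        CACHE_DIR.mkdir(exist_ok=True)               # "Creating directory ... image-cache_base"
        cached = CACHE_DIR / f"{image_id}.vmdk"
        if not cached.exists():                      # SearchDatastore found nothing
            fetch(cached)                            # download from the image service (not shown)
        return cached

    def spawn_root_disk(image_id, instance_uuid, root_gb, fetch):
        """Copy the cached image into the instance folder and grow it to flavor size."""
        cached = ensure_cached_image(image_id, fetch)
        inst_dir = Path(instance_uuid)
        inst_dir.mkdir(exist_ok=True)
        root = inst_dir / f"{instance_uuid}.vmdk"
        shutil.copyfile(cached, root)                # CopyVirtualDisk_Task
        with root.open("r+b") as f:                  # ExtendVirtualDisk_Task (crude stand-in)
            f.truncate(root_gb * 1024 * 1024 * 1024)
        return root

    if __name__ == "__main__":
        path = spawn_root_disk("e07a6c11-ab12-4187-81fc-1a28a9d1e65d",
                               "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e",
                               root_gb=1,
                               fetch=lambda dst: dst.write_bytes(b"\0" * 1024))
        print(path, path.stat().st_size)
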
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.803670] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.804165] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Instance network_info: |[{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 787.804781] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:36:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.812948] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Creating folder: Project (e2c65efa327e403284ad2e78b3c7b7d9). Parent ref: group-v267550. 
{{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.813245] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de26832e-b347-4f63-b719-b20c97ae04d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.817723] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 787.822236] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Created folder: Project (e2c65efa327e403284ad2e78b3c7b7d9) in parent group-v267550. [ 787.822420] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Creating folder: Instances. Parent ref: group-v267615. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 787.822650] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a9aed06-bbd9-45f7-974d-0f07d4abbb2c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.831500] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Created folder: Instances in parent group-v267615. [ 787.831680] env[61898]: DEBUG oslo.service.loopingcall [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.831871] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 787.832094] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f86a4e2-11c5-46d3-8118-1d48388eef06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.855121] env[61898]: DEBUG nova.compute.manager [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 787.855337] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.855791] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.855791] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.855904] env[61898]: DEBUG nova.compute.manager [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] No waiting events found dispatching network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 787.856096] env[61898]: WARNING nova.compute.manager [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received unexpected event network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 for instance with vm_state building and task_state spawning. [ 787.856291] env[61898]: DEBUG nova.compute.manager [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-changed-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 787.856485] env[61898]: DEBUG nova.compute.manager [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing instance network info cache due to event network-changed-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2. 
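
The network-vif-plugged handling above shows the external-event table: a spawning thread can register interest in an event and block on it, while the handler pops and signals the waiter, or, as in the WARNING above, finds no waiter and records the event as unexpected. A minimal sketch of that table, not the actual Nova implementation:

    import threading

    class InstanceEvents:
        """Per-instance table of waited-for external events."""

        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, event_name)] = ev
            return ev

        def pop_and_signal(self, instance_uuid, event_name):
            with self._lock:                          # the "<uuid>-events" lock
                ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"Received unexpected event {event_name} for instance {instance_uuid}")
                return False
            ev.set()
            return True

    if __name__ == "__main__":
        table = InstanceEvents()
        # Nobody is waiting yet, so this mirrors the WARNING seen above.
        table.pop_and_signal("e851d73d-58f0-486a-a95c-70d07e5faad2",
                             "network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2")
        waiter = table.prepare("e851d73d-58f0-486a-a95c-70d07e5faad2",
                               "network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2")
        table.pop_and_signal("e851d73d-58f0-486a-a95c-70d07e5faad2",
                             "network-vif-plugged-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2")
        print("signalled:", waiter.is_set())
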
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 787.856675] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.856809] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.856958] env[61898]: DEBUG nova.network.neutron [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing network info cache for port 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.863370] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.863370] env[61898]: value = "task-1240576" [ 787.863370] env[61898]: _type = "Task" [ 787.863370] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.865546] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 787.865772] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 787.865926] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.866124] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 787.866271] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image pref 0:0:0 
{{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.866415] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 787.866617] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 787.866779] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 787.867019] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 787.867233] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 787.867424] env[61898]: DEBUG nova.virt.hardware [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 787.868583] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1fe6bb-ba88-448a-9834-41ddd596987d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.882016] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240576, 'name': CreateVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.883328] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa78f4f-0ee5-426c-8a8f-6cb12a0f8e7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.216634] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089234} completed successfully. 
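
The hardware.py lines above enumerate every sockets/cores/threads split of the flavor's vCPU count that fits the flavor and image limits, then sort by preference; with one vCPU the only candidate is 1:1:1. A small stand-alone sketch of that enumeration, a simplification rather than the actual nova.virt.hardware code:

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals vcpus."""
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            if vcpus % sockets:
                continue
            remaining = vcpus // sockets
            for cores in range(1, min(max_cores, remaining) + 1):
                if remaining % cores:
                    continue
                threads = remaining // cores
                if threads <= max_threads:
                    yield sockets, cores, threads

    if __name__ == "__main__":
        print(list(possible_cpu_topologies(1)))   # -> [(1, 1, 1)], as in the log above
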
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.217392] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.218727] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ed781b-bd7e-468b-8d00-017cb9ca47db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.244258] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.247018] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed29ce09-a963-4182-9b5a-74b6af5ced58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.267564] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.268308] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 788.271568] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 788.271568] env[61898]: value = "task-1240577" [ 788.271568] env[61898]: _type = "Task" [ 788.271568] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.272358] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.012s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.272714] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.274988] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.772s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.276980] env[61898]: INFO nova.compute.claims [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 788.295590] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240573, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.300927] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240577, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.337032] env[61898]: INFO nova.scheduler.client.report [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Deleted allocations for instance 34338563-05d4-477b-8480-6ef4cbf28e72 [ 788.378401] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240576, 'name': CreateVM_Task, 'duration_secs': 0.488716} completed successfully. 
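
The instance_claim entries above succeed because the requested resources fit within the provider inventory reported earlier in this log (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). A simplified check in the spirit of that accounting; the used figures below are made-up example inputs:

    def can_claim(requested, used, inventory):
        """One resource class: used + requested must fit within
        (total - reserved) * allocation_ratio."""
        limit = (inventory["total"] - inventory["reserved"]) * inventory["allocation_ratio"]
        return used + requested <= limit

    if __name__ == "__main__":
        # Inventory figures taken from the provider report earlier in this log.
        vcpu = {"total": 48, "reserved": 0, "allocation_ratio": 4.0}
        mem = {"total": 196590, "reserved": 512, "allocation_ratio": 1.0}
        print(can_claim(1, used=10, inventory=vcpu))      # 1 vCPU for an m1.nano
        print(can_claim(192, used=1000, inventory=mem))   # 192 MB of RAM
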
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.378936] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.380015] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.380378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.380852] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 788.381234] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca054164-7fc8-47b5-a9e9-869f4852f75a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.389023] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 788.389023] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523c9e76-67d8-3c02-e599-18a40d2277f3" [ 788.389023] env[61898]: _type = "Task" [ 788.389023] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.398590] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523c9e76-67d8-3c02-e599-18a40d2277f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.590776] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Successfully updated port: 9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 788.597641] env[61898]: DEBUG nova.network.neutron [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updated VIF entry in instance network info cache for port 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 788.599123] env[61898]: DEBUG nova.network.neutron [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.775653] env[61898]: DEBUG nova.compute.utils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 788.777343] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 788.777520] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 788.812806] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605161} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.813262] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240577, 'name': ReconfigVM_Task, 'duration_secs': 0.29995} completed successfully. 
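[editor's note] The instance_info_cache entry above stores the port as a list of VIF dictionaries (id, MAC address, fixed IPs, MTU, OVS details). The short, self-contained sketch below pulls the commonly used fields out of such a cache record; the structure is copied from the logged entry, trimmed to just the fields the sketch reads.

    # Abbreviated copy of the cached VIF record logged above.
    network_info = [{
        "id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2",
        "address": "fa:16:3e:40:36:b4",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.4", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
        "type": "ovs",
        "devname": "tap8eab7c47-4a",
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], vif["devname"],
              "mtu=%s" % vif["network"]["meta"]["mtu"], "ips=%s" % fixed)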
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.813543] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.813751] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.814023] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Reconfigured VM instance instance-0000003d to attach disk [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e/ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.814821] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c673abf-7cc5-4261-9e3b-c35a39c6a455 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.816680] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02f6d948-4486-4a91-a347-fbf7f75c2136 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.826471] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 788.826471] env[61898]: value = "task-1240578" [ 788.826471] env[61898]: _type = "Task" [ 788.826471] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.826773] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 788.826773] env[61898]: value = "task-1240579" [ 788.826773] env[61898]: _type = "Task" [ 788.826773] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.842103] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240578, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.847910] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240579, 'name': Rename_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.848853] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf359631-01fb-49c0-9833-d30abcf6e2c9 tempest-VolumesAdminNegativeTest-1891573675 tempest-VolumesAdminNegativeTest-1891573675-project-member] Lock "34338563-05d4-477b-8480-6ef4cbf28e72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.993s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.868537] env[61898]: DEBUG nova.policy [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce8ddf4b7fe4e0583f09e7f88ab5e70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '975e564bd7f442629018b97007460e00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 788.898125] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523c9e76-67d8-3c02-e599-18a40d2277f3, 'name': SearchDatastore_Task, 'duration_secs': 0.014589} completed successfully. 
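[editor's note] The nova.policy entry above shows the check for network:attach_external_network being rejected for a caller whose roles are only ['reader', 'member']. Nova evaluates such rules with oslo.policy; the snippet below is a deliberately simplified stand-in that evaluates a "role:<name>" rule string against the credentials logged above, just to illustrate why the check fails. The rule string 'role:admin' is an assumption for the example, not Nova's actual policy default.

    # Credentials as logged for the failed network:attach_external_network check.
    creds = {
        'user_id': '2ce8ddf4b7fe4e0583f09e7f88ab5e70',
        'project_id': '975e564bd7f442629018b97007460e00',
        'roles': ['reader', 'member'],
    }

    def check(rule, credentials):
        """Evaluate a tiny 'role:<name>' style rule (a stand-in for oslo.policy)."""
        kind, _, value = rule.partition(':')
        if kind == 'role':
            return value in credentials.get('roles', [])
        raise ValueError('unsupported rule: %s' % rule)

    # Assuming an admin-only rule, member/reader credentials are rejected,
    # which is the "Policy check ... failed" outcome in the log.
    print(check('role:admin', creds))   # False
    print(check('role:member', creds))  # True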
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.898458] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.898691] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.898921] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.899075] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.899254] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.899507] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8a7374c-a750-48c0-b869-31e25a9f21b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.907485] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.907663] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Folder [datastore1] devstack-image-cache_base created. 
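[editor's note] The sequence above acquires and releases a named lock on "[datastore1] devstack-image-cache_base/<image-id>.vmdk" so that only one request at a time fetches or inspects a cached image. A minimal sketch of that pattern with oslo.concurrency follows; the lock name reuses the image id from the log, but the guarded function body is illustrative, not Nova's actual code, and it uses a plain in-process lock where Nova also takes an external semaphore to coordinate across workers.

    from oslo_concurrency import lockutils

    IMAGE_ID = "e07a6c11-ab12-4187-81fc-1a28a9d1e65d"   # image id from the log
    CACHE_LOCK = "[datastore1] devstack-image-cache_base/%s" % IMAGE_ID

    def fetch_image_if_missing():
        # lockutils.lock() is a context manager keyed by name; only one
        # caller at a time gets past this point for the same image.
        with lockutils.lock(CACHE_LOCK):
            print("checking image cache for", IMAGE_ID)

    if __name__ == "__main__":
        fetch_image_if_missing()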
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.908392] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c83bdea-427b-406a-82ab-94b216597260 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.914901] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 788.914901] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52298628-7963-32ca-9a4a-f34a581c2bf1" [ 788.914901] env[61898]: _type = "Task" [ 788.914901] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.922499] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52298628-7963-32ca-9a4a-f34a581c2bf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.093227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.093390] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.093496] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 789.101605] env[61898]: DEBUG oslo_concurrency.lockutils [req-748389cd-5926-4c33-8ab1-f673267dab64 req-2e643735-c358-4138-9430-9280b6cc1978 service nova] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.280638] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 789.292631] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Successfully created port: 9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 789.346622] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066842} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.351882] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 789.351882] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240579, 'name': Rename_Task, 'duration_secs': 0.13699} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.351882] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db94cb2d-9812-46b4-a908-b225d50e3e09 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.354804] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 789.358529] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a491fd7-e872-4a1a-b83c-73e6004dd091 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.384725] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.388735] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28f52c7a-eb07-426e-a003-e905d3696229 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.403564] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 789.403564] 
env[61898]: value = "task-1240580" [ 789.403564] env[61898]: _type = "Task" [ 789.403564] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.409757] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 789.409757] env[61898]: value = "task-1240581" [ 789.409757] env[61898]: _type = "Task" [ 789.409757] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.416277] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.426288] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240581, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.431750] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52298628-7963-32ca-9a4a-f34a581c2bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.007465} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.432748] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06f6ef6c-ef07-4756-b7b7-3c8a4f6ad8d6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.440263] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 789.440263] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5282789a-53c0-bd09-64ff-36aa19aa09c7" [ 789.440263] env[61898]: _type = "Task" [ 789.440263] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.448371] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5282789a-53c0-bd09-64ff-36aa19aa09c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.657360] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 789.783659] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d9943c-bdbb-4472-92e3-c034094635a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.795755] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8c02db-6308-44fa-8752-dfed32310eb1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.843621] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56c696b-2224-41a0-b06e-04d7a197e075 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.854166] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b5d4e1-dd11-4630-8ced-9cdc782a5bb6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.871381] env[61898]: DEBUG nova.compute.provider_tree [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.915060] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240580, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.923835] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240581, 'name': ReconfigVM_Task, 'duration_secs': 0.339839} completed successfully. 
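[editor's note] The ProviderTree update above carries the resource-provider inventory that Placement schedules against; the usable capacity of each resource class is derived from total, reserved, and allocation_ratio. The sketch below applies that standard capacity calculation to the exact figures from the log entry (the helper name is ours).

    # Inventory exactly as logged for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(record):
        """Effective capacity: (total - reserved) * allocation_ratio."""
        return (record['total'] - record['reserved']) * record['allocation_ratio']

    for rc, record in inventory.items():
        print("%-9s capacity = %s" % (rc, capacity(record)))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0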
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.924295] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Reconfigured VM instance instance-0000003e to attach disk [datastore1] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.925066] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24c99118-5bc7-4f38-b935-bcf1e2084e62 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.931631] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 789.931631] env[61898]: value = "task-1240582" [ 789.931631] env[61898]: _type = "Task" [ 789.931631] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.940379] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240582, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.954238] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5282789a-53c0-bd09-64ff-36aa19aa09c7, 'name': SearchDatastore_Task, 'duration_secs': 0.00973} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.957180] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.957703] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] e851d73d-58f0-486a-a95c-70d07e5faad2/e851d73d-58f0-486a-a95c-70d07e5faad2.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.958256] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83d42c57-24e3-44e5-ad88-7ba2215e4025 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.968157] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 789.968157] env[61898]: value = "task-1240583" [ 789.968157] env[61898]: _type = "Task" [ 789.968157] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.978469] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240583, 'name': CopyVirtualDisk_Task} progress is 0%. 
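[editor's note] The vm_util entry above copies the cached image vmdk into a per-instance folder before the root disk is extended. The datastore paths in these messages follow the "[datastore] folder/file.vmdk" convention; the tiny sketch below rebuilds the two paths from that copy message (the helper function is illustrative).

    def ds_path(datastore, *parts):
        """Build a '[datastore] a/b/c' style path like the ones in the log."""
        return "[%s] %s" % (datastore, "/".join(parts))

    IMAGE_ID = "e07a6c11-ab12-4187-81fc-1a28a9d1e65d"
    INSTANCE_UUID = "e851d73d-58f0-486a-a95c-70d07e5faad2"

    cached = ds_path("datastore1", "devstack-image-cache_base",
                     IMAGE_ID, IMAGE_ID + ".vmdk")
    target = ds_path("datastore1", INSTANCE_UUID, INSTANCE_UUID + ".vmdk")

    # Matches the CopyVirtualDisk_Task source/target logged above.
    print("copy", cached, "->", target)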
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.060444] env[61898]: DEBUG nova.network.neutron [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Updating instance_info_cache with network_info: [{"id": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "address": "fa:16:3e:ad:2d:34", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9eb9e879-2a", "ovs_interfaceid": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.119148] env[61898]: DEBUG nova.compute.manager [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Received event network-vif-plugged-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 790.119148] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Acquiring lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.119148] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.119148] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.119148] env[61898]: DEBUG nova.compute.manager [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] No waiting events found dispatching network-vif-plugged-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 790.119148] env[61898]: WARNING nova.compute.manager [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Received unexpected event network-vif-plugged-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 for instance with vm_state building and task_state spawning. [ 790.119485] env[61898]: DEBUG nova.compute.manager [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Received event network-changed-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 790.119764] env[61898]: DEBUG nova.compute.manager [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Refreshing instance network info cache due to event network-changed-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 790.120529] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Acquiring lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.295269] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 790.332307] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 790.332759] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 790.333110] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
790.333486] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 790.333786] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 790.334133] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 790.334517] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 790.334936] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 790.335301] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 790.335644] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 790.335983] env[61898]: DEBUG nova.virt.hardware [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 790.337143] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b767cbe8-7732-41f2-9bdd-476ea1de170a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.347237] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beea72df-36e8-4627-aad4-bdddffcba374 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.410166] env[61898]: DEBUG nova.scheduler.client.report [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Updated inventory for provider 
79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 83 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 790.410704] env[61898]: DEBUG nova.compute.provider_tree [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 83 to 84 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 790.410981] env[61898]: DEBUG nova.compute.provider_tree [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 790.421266] env[61898]: DEBUG oslo_vmware.api [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240580, 'name': PowerOnVM_Task, 'duration_secs': 0.769108} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.421266] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 790.421266] env[61898]: DEBUG nova.compute.manager [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 790.421266] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0dd16e-34c4-45ae-82b6-1e22e09ca9ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.441804] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240582, 'name': Rename_Task, 'duration_secs': 0.146709} completed successfully. 
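[editor's note] A few entries above, nova.virt.hardware builds the possible CPU topologies for the m1.nano flavor (1 vCPU, no flavor or image limits) and ends up with the single candidate sockets=1, cores=1, threads=1. The sketch below reproduces that enumeration in simplified form, listing every sockets x cores x threads factorization of the vCPU count that fits the given maxima; the 65536 defaults mirror the "limits were sockets=65536, cores=65536, threads=65536" line, and Nova's real routine additionally applies preference and ordering rules this sketch skips.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Enumerate sockets*cores*threads factorizations of vcpus (simplified)."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # 1 vCPU -> the single topology seen in the log: (sockets=1, cores=1, threads=1)
    print(possible_topologies(1))
    # A 4 vCPU flavor would instead yield (1,1,4), (1,2,2), (1,4,1), (2,1,2), ...
    print(possible_topologies(4))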
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.442083] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.442324] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51a620bd-e361-4dca-a5e5-079bf9d3d02a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.449416] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 790.449416] env[61898]: value = "task-1240584" [ 790.449416] env[61898]: _type = "Task" [ 790.449416] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.457315] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240584, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.475511] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240583, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462336} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.475769] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] e851d73d-58f0-486a-a95c-70d07e5faad2/e851d73d-58f0-486a-a95c-70d07e5faad2.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.476047] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.476244] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c335555-a0d2-4468-8eb7-03205c41c742 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.482904] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 790.482904] env[61898]: value = "task-1240585" [ 790.482904] env[61898]: _type = "Task" [ 790.482904] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.492033] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240585, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.563065] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.563431] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Instance network_info: |[{"id": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "address": "fa:16:3e:ad:2d:34", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9eb9e879-2a", "ovs_interfaceid": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 790.563816] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Acquired lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.564247] env[61898]: DEBUG nova.network.neutron [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Refreshing network info cache for port 9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.566198] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:2d:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9eb9e879-2a9d-4f9d-8a74-ae7d21738e53', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 790.576653] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating folder: Project (7e8b71885c83418fb13e216f804ffeeb). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.580819] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41f02f31-b4a4-41b8-924c-4cf47a5f1506 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.592061] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created folder: Project (7e8b71885c83418fb13e216f804ffeeb) in parent group-v267550. [ 790.592293] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating folder: Instances. Parent ref: group-v267618. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 790.592563] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c47874bb-dc46-4d3f-b073-ecaf09b8dcd2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.601864] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created folder: Instances in parent group-v267618. [ 790.602128] env[61898]: DEBUG oslo.service.loopingcall [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 790.602325] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 790.602526] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b964ab7-f679-4582-8cfa-6dd10342d90d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.621979] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 790.621979] env[61898]: value = "task-1240588" [ 790.621979] env[61898]: _type = "Task" [ 790.621979] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.629940] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240588, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.825250] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Successfully updated port: 9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.916456] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.641s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.916919] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 790.920492] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.780s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.923192] env[61898]: INFO nova.compute.claims [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.936801] env[61898]: DEBUG nova.compute.manager [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Received event network-vif-plugged-9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 790.937077] env[61898]: DEBUG oslo_concurrency.lockutils [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.937314] env[61898]: DEBUG oslo_concurrency.lockutils [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.937495] env[61898]: DEBUG oslo_concurrency.lockutils [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.937667] env[61898]: DEBUG nova.compute.manager [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] No waiting events found dispatching network-vif-plugged-9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.937845] env[61898]: WARNING nova.compute.manager [req-19d966ad-1cee-4b92-a1e5-e38fe064b1ef req-29f86fd2-db25-4f30-aba3-320c6d5bfb54 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Received unexpected event network-vif-plugged-9f33f2c4-4626-4230-90ea-e91c5f0da486 for instance with vm_state building and task_state spawning. [ 790.941704] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.963355] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240584, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.994440] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240585, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062252} completed successfully. 
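[editor's note] The "service nova" request entries above show Neutron notifying Nova of network-vif-plugged / network-changed events; the compute manager only finds a waiter if someone registered for the event beforehand, otherwise it logs the "Received unexpected event ... vm_state building" warning seen here. Below is a self-contained sketch of that register-then-dispatch pattern using threading.Event; the class and method names are ours, not Nova's.

    import threading

    class InstanceEvents:
        """Simplified version of the waiter registry the compute manager keeps."""

        def __init__(self):
            self._waiters = {}      # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()

        def prepare_for(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

        def dispatch(self, instance_uuid, event_name):
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                # The situation behind the WARNING above: the event arrived
                # before anyone registered for it.
                print("Received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
            else:
                waiter.set()

    events = InstanceEvents()
    uuid = "7c6aad92-6e91-48fc-89ae-5ee4c89f449c"
    events.dispatch(uuid, "network-vif-plugged")       # nobody waiting -> warning path
    waiter = events.prepare_for(uuid, "network-vif-plugged")
    events.dispatch(uuid, "network-vif-plugged")
    print("event delivered:", waiter.is_set())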
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.994731] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.998592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f21b47-8f95-4b85-a651-9feb82ce26e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.025032] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] e851d73d-58f0-486a-a95c-70d07e5faad2/e851d73d-58f0-486a-a95c-70d07e5faad2.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 791.025032] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74b3c935-648e-4cd6-8641-3e152426c6d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.046156] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 791.046156] env[61898]: value = "task-1240589" [ 791.046156] env[61898]: _type = "Task" [ 791.046156] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.055898] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240589, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.134774] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240588, 'name': CreateVM_Task, 'duration_secs': 0.390281} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.134935] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.135731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.136492] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.140031] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 791.140031] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bf3293a-8113-42f2-8b24-fc85a0c41d1c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.142496] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 791.142496] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520bee4a-a2cf-f4f9-955b-70ebb7461f4c" [ 791.142496] env[61898]: _type = "Task" [ 791.142496] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.150425] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520bee4a-a2cf-f4f9-955b-70ebb7461f4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.205069] env[61898]: DEBUG nova.network.neutron [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Updated VIF entry in instance network info cache for port 9eb9e879-2a9d-4f9d-8a74-ae7d21738e53. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 791.205437] env[61898]: DEBUG nova.network.neutron [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Updating instance_info_cache with network_info: [{"id": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "address": "fa:16:3e:ad:2d:34", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9eb9e879-2a", "ovs_interfaceid": "9eb9e879-2a9d-4f9d-8a74-ae7d21738e53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.327084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.327365] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.327398] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.427392] env[61898]: DEBUG nova.compute.utils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.428916] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 791.429248] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 791.461322] env[61898]: DEBUG oslo_vmware.api [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240584, 'name': PowerOnVM_Task, 'duration_secs': 0.52039} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.461649] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.462293] env[61898]: INFO nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Took 8.60 seconds to spawn the instance on the hypervisor. [ 791.462572] env[61898]: DEBUG nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 791.463341] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a506e589-f522-4bde-98de-43801bfef3e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.515854] env[61898]: DEBUG nova.policy [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2adf6bbc77ce44a8bb219c8ad019f293', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f368913c359420cbd16ef48aa83e27c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 791.556560] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240589, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.654022] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520bee4a-a2cf-f4f9-955b-70ebb7461f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.009264} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.654727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.654727] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.654902] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.654902] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.655078] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.655354] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7617a1b4-600a-4fb9-a6f3-8ef21d30deee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.664016] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.664113] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.664880] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01fe65b4-4479-4fab-8502-ea2c10e642a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.673093] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 791.673093] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52beee1b-3222-01be-91d3-b5a3096d49db" [ 791.673093] env[61898]: _type = "Task" [ 791.673093] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.683020] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52beee1b-3222-01be-91d3-b5a3096d49db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.707645] env[61898]: DEBUG oslo_concurrency.lockutils [req-3b857ca1-5a26-4b4f-8d14-0d4d069c6c14 req-664a6be4-98f9-4a5d-ad8f-8def9c887bbb service nova] Releasing lock "refresh_cache-52a584e1-61ae-447d-90e0-e15d32a96314" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 791.867057] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Successfully created port: 93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.885743] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.904346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.904700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.005s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.905051] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.905124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.905303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.907798] env[61898]: INFO nova.compute.manager [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Terminating instance [ 791.934391] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 791.983611] env[61898]: INFO nova.compute.manager [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Took 32.70 seconds to build instance. [ 792.061255] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240589, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.146950] env[61898]: DEBUG nova.network.neutron [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.184676] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52beee1b-3222-01be-91d3-b5a3096d49db, 'name': SearchDatastore_Task, 'duration_secs': 0.010631} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.188131] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc3d2390-bd69-419e-8593-9e279be377d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.197773] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 792.197773] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5200f55a-e692-81cc-a424-007e66c88b91" [ 792.197773] env[61898]: _type = "Task" [ 792.197773] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.209362] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5200f55a-e692-81cc-a424-007e66c88b91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.305204] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a61cdc-eabb-4a2d-bc4e-96cb19b45c97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.313765] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2527f9-f5d3-44fc-9cbc-7904e18b13a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.346786] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26e3465-a156-4b09-a4f1-ab1721fa9ac4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.354571] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f757c2b-8132-456f-aa0c-87dbb34dec4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.370701] env[61898]: DEBUG nova.compute.provider_tree [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.414591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "refresh_cache-ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.414591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquired lock "refresh_cache-ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.414794] env[61898]: DEBUG nova.network.neutron [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 792.486357] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b87acfca-ef1c-433e-ae9c-8b25d2d6c584 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.508s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.563056] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240589, 'name': ReconfigVM_Task, 'duration_secs': 1.02011} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.563056] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfigured VM instance instance-0000003f to attach disk [datastore1] e851d73d-58f0-486a-a95c-70d07e5faad2/e851d73d-58f0-486a-a95c-70d07e5faad2.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.563056] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee32f4b9-a548-4b59-8d0c-ae5eb85f5234 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.569607] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 792.569607] env[61898]: value = "task-1240590" [ 792.569607] env[61898]: _type = "Task" [ 792.569607] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.578581] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240590, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.652836] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.653444] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Instance network_info: |[{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 792.653776] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:f4:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f33f2c4-4626-4230-90ea-e91c5f0da486', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.664519] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating folder: Project (975e564bd7f442629018b97007460e00). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.664951] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea9511c-550a-469d-b67a-17f5e6612115 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.675754] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created folder: Project (975e564bd7f442629018b97007460e00) in parent group-v267550. [ 792.676024] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating folder: Instances. Parent ref: group-v267621. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.676295] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9dc5597b-a3e5-4bd1-832c-222bf8f9ddac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.686609] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created folder: Instances in parent group-v267621. [ 792.686609] env[61898]: DEBUG oslo.service.loopingcall [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.686609] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.686609] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-38bbe993-4ede-4e35-bd32-181da0b376ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.714485] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5200f55a-e692-81cc-a424-007e66c88b91, 'name': SearchDatastore_Task, 'duration_secs': 0.014368} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.717548] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.717548] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 52a584e1-61ae-447d-90e0-e15d32a96314/52a584e1-61ae-447d-90e0-e15d32a96314.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.717548] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.717548] env[61898]: value = "task-1240593" [ 792.717548] env[61898]: _type = "Task" [ 792.717548] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.717863] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-543e3b88-73c6-47cd-a147-c091bb47f09c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.732593] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240593, 'name': CreateVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.734894] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 792.734894] env[61898]: value = "task-1240594" [ 792.734894] env[61898]: _type = "Task" [ 792.734894] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.746155] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240594, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.876563] env[61898]: DEBUG nova.scheduler.client.report [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 792.950590] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 792.956089] env[61898]: DEBUG nova.network.neutron [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.987032] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.987353] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.987512] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.987690] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Flavor pref 
0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.987834] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.988019] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.988276] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.988445] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.988614] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.988817] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.989021] env[61898]: DEBUG nova.virt.hardware [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.990439] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a289b1d-870b-487b-8469-add50afb8246 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.993618] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 793.005297] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112676f4-017f-49bf-9d8b-601bfc81442f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.069216] env[61898]: DEBUG nova.network.neutron [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.091925] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240590, 'name': Rename_Task, 'duration_secs': 0.196816} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.092366] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.092666] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81c68b87-5afe-42fb-9206-193851c857eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.104789] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 793.104789] env[61898]: value = "task-1240595" [ 793.104789] env[61898]: _type = "Task" [ 793.104789] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.116017] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.121903] env[61898]: DEBUG nova.compute.manager [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Received event network-changed-9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 793.122162] env[61898]: DEBUG nova.compute.manager [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Refreshing instance network info cache due to event network-changed-9f33f2c4-4626-4230-90ea-e91c5f0da486. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 793.122399] env[61898]: DEBUG oslo_concurrency.lockutils [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.123089] env[61898]: DEBUG oslo_concurrency.lockutils [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.123089] env[61898]: DEBUG nova.network.neutron [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Refreshing network info cache for port 9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.232115] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240593, 'name': CreateVM_Task, 'duration_secs': 0.343225} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.232337] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.233090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.233259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.233565] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 793.233929] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1829f397-da10-4d3c-a9f6-adac87da62b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.242740] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 793.242740] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f808cf-85cd-2a20-0b1b-7e10b1cdd108" [ 793.242740] env[61898]: _type = "Task" [ 793.242740] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.246801] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240594, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.254220] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f808cf-85cd-2a20-0b1b-7e10b1cdd108, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.380614] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.381175] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 793.383828] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.299s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.385636] env[61898]: INFO nova.compute.claims [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.519552] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.575213] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Releasing lock "refresh_cache-ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.575726] env[61898]: DEBUG nova.compute.manager [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 793.575963] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.576956] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4eb409-b3b0-433a-8577-1d6f1bfe3f72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.584659] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 793.584910] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21f3a81d-13e7-4260-b6f9-89e6b6222451 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.591892] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 793.591892] env[61898]: value = "task-1240596" [ 793.591892] env[61898]: _type = "Task" [ 793.591892] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.600687] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240596, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.606312] env[61898]: INFO nova.compute.manager [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Rebuilding instance [ 793.617569] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240595, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.654952] env[61898]: DEBUG nova.compute.manager [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 793.655831] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee59ae3d-231b-4537-a889-660c72e21c3d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.746124] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553902} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.746422] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 52a584e1-61ae-447d-90e0-e15d32a96314/52a584e1-61ae-447d-90e0-e15d32a96314.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.750023] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.750683] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9f7fdd4-c6ea-4274-87ae-c74d5413a1fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.761914] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f808cf-85cd-2a20-0b1b-7e10b1cdd108, 'name': SearchDatastore_Task, 'duration_secs': 0.035489} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.763399] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.763642] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.763877] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.764031] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.764213] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.764528] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 793.764528] env[61898]: value = "task-1240597" [ 793.764528] env[61898]: _type = "Task" [ 793.764528] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.764714] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e0e18dd-5a26-427e-a52e-8acb61641dc3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.775981] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240597, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.777343] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 793.777524] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 793.778667] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2835078c-21c2-4ce8-8a13-36a3b2babb54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.784160] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 793.784160] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527ce37c-f037-cc6e-de5f-563b0a51a4cf" [ 793.784160] env[61898]: _type = "Task" [ 793.784160] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.792681] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527ce37c-f037-cc6e-de5f-563b0a51a4cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.893179] env[61898]: DEBUG nova.compute.utils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 793.895868] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 793.895868] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.959621] env[61898]: DEBUG nova.policy [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c19c696ddd074d20a6eb7344c714d2cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '181c02c948844fa79ebc2d50a2de8d38', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 793.976046] env[61898]: DEBUG nova.network.neutron [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updated VIF entry in instance network info cache for port 9f33f2c4-4626-4230-90ea-e91c5f0da486. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 793.976439] env[61898]: DEBUG nova.network.neutron [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.013524] env[61898]: DEBUG nova.compute.manager [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-vif-plugged-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 794.013729] env[61898]: DEBUG oslo_concurrency.lockutils [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b 
req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] Acquiring lock "d6c96dce-13ae-411a-b52a-fee484718a8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.013935] env[61898]: DEBUG oslo_concurrency.lockutils [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.014375] env[61898]: DEBUG oslo_concurrency.lockutils [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.014573] env[61898]: DEBUG nova.compute.manager [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] No waiting events found dispatching network-vif-plugged-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 794.014752] env[61898]: WARNING nova.compute.manager [req-488268bb-4639-4ae5-8d17-ba33ef7cad9b req-358384cd-5e8a-4a0f-ac44-748a6dac0e4a service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received unexpected event network-vif-plugged-93f00603-54ee-451f-9579-32f82d4923b0 for instance with vm_state building and task_state spawning. [ 794.103916] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240596, 'name': PowerOffVM_Task, 'duration_secs': 0.132391} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.104285] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.104456] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.104876] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-296c2c2d-43e3-4dcf-866f-9c181371e30e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.117299] env[61898]: DEBUG oslo_vmware.api [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240595, 'name': PowerOnVM_Task, 'duration_secs': 0.776366} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.117562] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.117768] env[61898]: INFO nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Took 8.75 seconds to spawn the instance on the hypervisor. [ 794.117943] env[61898]: DEBUG nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 794.118808] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94db698b-116e-49b9-8b74-b6833b14875b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.136144] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.136144] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.136144] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Deleting the datastore file [datastore1] ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.136144] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1e10104-5ba0-4fff-b387-948e0e414608 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.139660] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for the task: (returnval){ [ 794.139660] env[61898]: value = "task-1240599" [ 794.139660] env[61898]: _type = "Task" [ 794.139660] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.148949] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240599, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.259124] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Successfully created port: c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.277178] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067972} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.277477] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.278273] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c6d4dc-c942-45e0-aee8-747cdcedb30d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.301333] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 52a584e1-61ae-447d-90e0-e15d32a96314/52a584e1-61ae-447d-90e0-e15d32a96314.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.305026] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52133a53-bd0e-4ea5-92bc-eab7e1759736 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.327955] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527ce37c-f037-cc6e-de5f-563b0a51a4cf, 'name': SearchDatastore_Task, 'duration_secs': 0.00951} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.329427] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 794.329427] env[61898]: value = "task-1240600" [ 794.329427] env[61898]: _type = "Task" [ 794.329427] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.329815] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-345eb8e6-6bb3-49d8-8b34-7b6dbbd8561c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.341374] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 794.341374] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5298ffa8-b0dd-84a7-2bbb-6281bff77238" [ 794.341374] env[61898]: _type = "Task" [ 794.341374] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.344942] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.353693] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5298ffa8-b0dd-84a7-2bbb-6281bff77238, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.397827] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 794.480131] env[61898]: DEBUG oslo_concurrency.lockutils [req-94ff68c3-6e9c-40f1-b6a3-cc4c0f80eddf req-4e614214-9506-4974-83cc-6a40ebb63b11 service nova] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.486371] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Successfully updated port: 93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 794.617165] env[61898]: DEBUG nova.compute.manager [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-changed-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 794.617370] env[61898]: DEBUG nova.compute.manager [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing instance network info cache due to event network-changed-93f00603-54ee-451f-9579-32f82d4923b0. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 794.617582] env[61898]: DEBUG oslo_concurrency.lockutils [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.617722] env[61898]: DEBUG oslo_concurrency.lockutils [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.617877] env[61898]: DEBUG nova.network.neutron [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.637952] env[61898]: INFO nova.compute.manager [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Took 31.34 seconds to build instance. [ 794.651276] env[61898]: DEBUG oslo_vmware.api [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Task: {'id': task-1240599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23782} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.654066] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 794.654331] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 794.654606] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 794.654904] env[61898]: INFO nova.compute.manager [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Took 1.08 seconds to destroy the instance on the hypervisor. [ 794.655219] env[61898]: DEBUG oslo.service.loopingcall [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.656969] env[61898]: DEBUG nova.compute.manager [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 794.656969] env[61898]: DEBUG nova.network.neutron [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 794.671143] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.671487] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03bfd0ca-7550-432e-9c6a-f7695c6d5cf1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.679104] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 794.679104] env[61898]: value = "task-1240601" [ 794.679104] env[61898]: _type = "Task" [ 794.679104] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.687460] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.694092] env[61898]: DEBUG nova.network.neutron [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.777275] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7decc111-c515-444a-825b-6811bc4a1c4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.786315] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62664b2e-8015-451a-8917-04208f820fa2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.817446] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71f9c68-5706-4ab5-9835-981ea886c43d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.826339] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e96de54-539c-4ea5-897f-705577ee73b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.839876] env[61898]: DEBUG nova.compute.provider_tree [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.853681] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.860595] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5298ffa8-b0dd-84a7-2bbb-6281bff77238, 'name': SearchDatastore_Task, 'duration_secs': 0.019574} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.860595] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.860595] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 794.860728] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b347b346-3817-4f36-95bd-6befd3a1ad84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.867103] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 794.867103] env[61898]: value = "task-1240602" [ 794.867103] env[61898]: _type = "Task" [ 794.867103] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.876281] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.987071] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.139968] env[61898]: DEBUG oslo_concurrency.lockutils [None req-91d9bba8-07c1-4932-bc10-3c79ccf72e5e tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.485s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.188020] env[61898]: DEBUG nova.network.neutron [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.194851] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240601, 'name': PowerOffVM_Task, 'duration_secs': 0.21321} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.195158] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.195438] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.196340] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6490b96c-1539-45ab-b5b6-95532ffc5a66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.200270] env[61898]: DEBUG nova.network.neutron [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.207606] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.208302] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b8494a1-8f91-4d14-8ca8-dc7a52f4ffde {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.281221] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.281468] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.281652] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore1] b106ab9e-08d4-4d18-90e0-13a071c9efb1 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.281974] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-892e11ca-bd2a-4719-8cbc-ed67c5a4bef5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.290345] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 795.290345] env[61898]: value = "task-1240604" [ 795.290345] env[61898]: _type = "Task" [ 795.290345] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.300597] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240604, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.349080] env[61898]: DEBUG nova.scheduler.client.report [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 795.366273] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240600, 'name': ReconfigVM_Task, 'duration_secs': 0.619183} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.366708] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 52a584e1-61ae-447d-90e0-e15d32a96314/52a584e1-61ae-447d-90e0-e15d32a96314.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.367616] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb475d0b-68b4-4703-9a1c-d09937f91cf9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.386364] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240602, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.388328] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 795.388328] env[61898]: value = "task-1240605" [ 795.388328] env[61898]: _type = "Task" [ 795.388328] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.393264] env[61898]: DEBUG nova.network.neutron [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.401028] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240605, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.409999] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 795.440103] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 795.440387] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 795.440608] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.440824] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 795.440987] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 795.441338] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 795.441619] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 795.441792] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 795.441993] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 795.442186] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 795.442408] env[61898]: DEBUG nova.virt.hardware [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.443494] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c39ec1-edc5-4fa9-9062-766317fbef22 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.454172] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b339782-6c32-4802-af97-0780bc3267f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.643864] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 795.704773] env[61898]: INFO nova.compute.manager [-] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Took 1.05 seconds to deallocate network for instance. 
[ 795.772374] env[61898]: DEBUG nova.compute.manager [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-changed-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 795.772835] env[61898]: DEBUG nova.compute.manager [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing instance network info cache due to event network-changed-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 795.772835] env[61898]: DEBUG oslo_concurrency.lockutils [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.772835] env[61898]: DEBUG oslo_concurrency.lockutils [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.773133] env[61898]: DEBUG nova.network.neutron [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing network info cache for port 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.800855] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268875} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.801647] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.801872] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.802057] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.860221] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.860754] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 795.863249] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.896s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.866248] env[61898]: INFO nova.compute.claims [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.889925] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612843} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.894148] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 795.894490] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 795.894799] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-305b8997-6b4b-4304-81a6-7c1bc0f77c3c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.897975] env[61898]: DEBUG oslo_concurrency.lockutils [req-79a80d33-f273-4a4e-be74-91d8cccb2002 req-3d72b192-4bf1-46b0-9ad3-b1440ee4ed28 service nova] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.898437] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.898636] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.905673] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240605, 'name': Rename_Task, 'duration_secs': 0.150702} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.907812] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.909682] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 795.909682] env[61898]: value = "task-1240606" [ 795.909682] env[61898]: _type = "Task" [ 795.909682] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.910515] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6e1e139-f6cb-40b4-925a-eafa7a6d9366 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.919672] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Successfully updated port: c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.928845] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.930916] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 795.930916] env[61898]: value = "task-1240607" [ 795.930916] env[61898]: _type = "Task" [ 795.930916] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.940981] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.171329] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.211441] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.378295] env[61898]: DEBUG nova.compute.utils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.380684] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 796.380861] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.424651] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.424970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquired lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.424970] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.426235] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107668} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.426533] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.427498] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59540d8b-6a37-407f-b701-b89f2a9e9256 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.453647] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.454755] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.459650] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81e85147-0c63-4417-b7d4-1581d98b844c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.479342] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240607, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.485544] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 796.485544] env[61898]: value = "task-1240608" [ 796.485544] env[61898]: _type = "Task" [ 796.485544] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.494540] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.500117] env[61898]: DEBUG nova.policy [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 796.706879] env[61898]: DEBUG nova.network.neutron [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updated VIF entry in instance network info cache for port 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.707273] env[61898]: DEBUG nova.network.neutron [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.713714] env[61898]: DEBUG nova.network.neutron [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.855203] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.855203] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.855203] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.855203] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.855203] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.856041] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.856418] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.856958] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 796.857307] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.858282] env[61898]: DEBUG nova.virt.hardware 
[None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.858282] env[61898]: DEBUG nova.virt.hardware [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.859760] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe796033-b801-4250-a6c5-b8a48a898ea2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.871349] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc2bd21e-a2cc-4f8a-b87d-75d56795a4c5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.889698] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 796.892530] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:11:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '238d786d-b9f2-4cbb-86ed-2508303aa88f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.900082] env[61898]: DEBUG oslo.service.loopingcall [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.901478] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.901478] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0439755a-a2b1-4559-aec3-7e1126edd751 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.926870] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.926870] env[61898]: value = "task-1240609" [ 796.926870] env[61898]: _type = "Task" [ 796.926870] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.940973] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240609, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.958935] env[61898]: DEBUG oslo_vmware.api [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240607, 'name': PowerOnVM_Task, 'duration_secs': 0.573622} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.962030] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.962367] env[61898]: INFO nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Took 9.14 seconds to spawn the instance on the hypervisor. [ 796.962430] env[61898]: DEBUG nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 796.963482] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d1f5cd-21be-4a47-9759-50e549bac63a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.996258] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.999512] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.015713] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Successfully created port: 536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.211433] env[61898]: DEBUG oslo_concurrency.lockutils [req-7e087cdf-f6b2-4cd8-95ae-cbd06a648532 req-ab4b195b-759c-4ba4-aaf2-0f917baba15c service nova] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.216139] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.216562] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance network_info: |[{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 797.217430] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:b7:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f4a795c-8718-4a7c-aafe-9da231df10f8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93f00603-54ee-451f-9579-32f82d4923b0', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.226152] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 
tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Creating folder: Project (5f368913c359420cbd16ef48aa83e27c). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.228530] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f757eb87-a787-497b-8e5a-44b17c856428 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.244401] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Created folder: Project (5f368913c359420cbd16ef48aa83e27c) in parent group-v267550. [ 797.244697] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Creating folder: Instances. Parent ref: group-v267625. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.244996] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a09ab1e3-39b0-41fc-92af-78c06847fe63 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.255912] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Created folder: Instances in parent group-v267625. [ 797.256236] env[61898]: DEBUG oslo.service.loopingcall [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
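The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entries come from oslo.service's looping-call machinery, which re-invokes a function on a fixed interval until it signals completion. The sketch below shows only that general pattern, assuming oslo.service is installed; wait_for_result and make_attempt are illustrative names, and this is not the actual Nova wrapper around create_vm.

from oslo_service import loopingcall

def wait_for_result(make_attempt, interval=0.5):
    """Call make_attempt() every `interval` seconds until it returns a value."""
    def _poll():
        result = make_attempt()
        if result is not None:
            # Stop the timer and hand the value back to wait().
            raise loopingcall.LoopingCallDone(retvalue=result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    timer.start(interval=interval)
    return timer.wait()

Raising LoopingCallDone from inside the polled function is the conventional way to end a FixedIntervalLoopingCall and return a result to the waiter.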
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.256448] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.259770] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55ab31f3-b9fd-470f-9b5d-2ee7a63c89dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.278912] env[61898]: DEBUG nova.network.neutron [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Updating instance_info_cache with network_info: [{"id": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "address": "fa:16:3e:95:f9:68", "network": {"id": "3e2ea5da-f899-4f6b-814b-93c4bab1f489", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-925043666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "181c02c948844fa79ebc2d50a2de8d38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3bb7681-34", "ovs_interfaceid": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.285543] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.285543] env[61898]: value = "task-1240612" [ 797.285543] env[61898]: _type = "Task" [ 797.285543] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.302192] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240612, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.336092] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52e2a10-ea7b-4558-9d8c-992f396c99fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.346726] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d7e5ca-ffc7-41bf-a561-da3d656729ca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.381553] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1effbb86-c881-468c-8b28-e90d51e692b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.392237] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393035ac-d019-4118-a1f8-a6875199e38f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.407765] env[61898]: DEBUG nova.compute.provider_tree [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.437939] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240609, 'name': CreateVM_Task, 'duration_secs': 0.39394} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.438260] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.438881] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.439069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.439432] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 797.439742] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b383398b-1be7-473a-affa-cb5ae69a025b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
797.447361] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 797.447361] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5225679b-7c42-bd0a-cd08-1596a2664cd8" [ 797.447361] env[61898]: _type = "Task" [ 797.447361] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.455333] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5225679b-7c42-bd0a-cd08-1596a2664cd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.487942] env[61898]: INFO nova.compute.manager [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Took 33.28 seconds to build instance. [ 797.504360] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240608, 'name': ReconfigVM_Task, 'duration_secs': 0.781361} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.504360] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.504938] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d24bf199-3905-46d6-8347-b71f19f74c54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.513295] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 797.513295] env[61898]: value = "task-1240613" [ 797.513295] env[61898]: _type = "Task" [ 797.513295] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.527465] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240613, 'name': Rename_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.786091] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Releasing lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.786432] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Instance network_info: |[{"id": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "address": "fa:16:3e:95:f9:68", "network": {"id": "3e2ea5da-f899-4f6b-814b-93c4bab1f489", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-925043666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "181c02c948844fa79ebc2d50a2de8d38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3bb7681-34", "ovs_interfaceid": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 797.786999] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:f9:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd70692eb-97b3-417c-a4ca-1ee888246ad9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.796113] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Creating folder: Project (181c02c948844fa79ebc2d50a2de8d38). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.803424] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-55455ba6-5870-4c52-8fc6-13722a8197ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.808344] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240612, 'name': CreateVM_Task, 'duration_secs': 0.460828} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.808557] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.809279] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.812486] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Created folder: Project (181c02c948844fa79ebc2d50a2de8d38) in parent group-v267550. [ 797.812486] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Creating folder: Instances. Parent ref: group-v267628. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.812857] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ba07caf-b5f7-4e64-aea0-3b6286736e5f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.824610] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Created folder: Instances in parent group-v267628. [ 797.824610] env[61898]: DEBUG oslo.service.loopingcall [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.824755] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.824934] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ebddf19-8541-491f-bfa2-10ff68c2010d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.846781] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.846781] env[61898]: value = "task-1240616" [ 797.846781] env[61898]: _type = "Task" [ 797.846781] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.858240] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240616, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.902473] env[61898]: DEBUG nova.compute.manager [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Received event network-vif-plugged-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 797.902749] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Acquiring lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.902959] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.905029] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.905222] env[61898]: DEBUG nova.compute.manager [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] No waiting events found dispatching network-vif-plugged-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 797.905425] env[61898]: WARNING nova.compute.manager [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Received unexpected event network-vif-plugged-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 for instance with vm_state building and task_state spawning. [ 797.905703] env[61898]: DEBUG nova.compute.manager [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Received event network-changed-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 797.905966] env[61898]: DEBUG nova.compute.manager [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Refreshing instance network info cache due to event network-changed-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9. 
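The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triples in the surrounding entries are emitted by oslo.concurrency's named-lock helpers. A minimal sketch of that usage follows; the lock name "compute_resources" is one that appears earlier in this log, and the function bodies are purely illustrative.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # The decorator serializes callers on a named semaphore, which is what
    # produces the acquired/waited and released/held pairs seen in this log.
    print('claiming resources for %s' % instance_uuid)

def update_usage(instance_uuid):
    # The same primitive is available as a context manager.
    with lockutils.lock('compute_resources'):
        print('updating usage for %s' % instance_uuid)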
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 797.906783] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Acquiring lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.906783] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Acquired lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.906783] env[61898]: DEBUG nova.network.neutron [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Refreshing network info cache for port c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 797.914024] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 797.914299] env[61898]: DEBUG nova.scheduler.client.report [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 797.947863] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.948407] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.948641] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.948845] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.948995] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.949159] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 797.949370] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.949530] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 797.949696] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.949955] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.950210] env[61898]: DEBUG nova.virt.hardware [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.951416] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bb7f61-72aa-4720-80d4-ff1be357a329 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.966310] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef2512c-3480-493c-b6c8-4aefd6592d67 {{(pid=61898) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.970373] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5225679b-7c42-bd0a-cd08-1596a2664cd8, 'name': SearchDatastore_Task, 'duration_secs': 0.010255} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.970779] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.970779] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.970886] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 797.971088] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.972967] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.972967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.972967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 797.972967] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-45a2853e-1c3f-4ff7-ae84-f04faeba2a77 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.981629] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-637d4f30-cba1-4d10-9714-79c0a6092628 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.987533] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 797.987533] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528f149b-aa59-60a8-0331-ee3f300275d2" [ 797.987533] env[61898]: _type = "Task" [ 797.987533] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.991531] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.991706] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 797.992689] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b82b4611-6459-42ad-96b0-4e078decfe85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.997966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-38243af5-9195-4e48-8a64-033e1c815473 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.317s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.998232] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528f149b-aa59-60a8-0331-ee3f300275d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.001141] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 798.001141] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522e77b2-3b8a-7b5f-1d2d-0b9b65dd60e8" [ 798.001141] env[61898]: _type = "Task" [ 798.001141] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.008409] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522e77b2-3b8a-7b5f-1d2d-0b9b65dd60e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.021335] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240613, 'name': Rename_Task, 'duration_secs': 0.215965} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.021596] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.021822] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-718af8f1-2450-4b8b-9e12-6777670dd686 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.028363] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 798.028363] env[61898]: value = "task-1240617" [ 798.028363] env[61898]: _type = "Task" [ 798.028363] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.037342] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240617, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.362719] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240616, 'name': CreateVM_Task, 'duration_secs': 0.344601} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.363251] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.364339] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.423282] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.557s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.423282] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 798.426137] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.569s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.428013] env[61898]: INFO nova.compute.claims [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.504020] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528f149b-aa59-60a8-0331-ee3f300275d2, 'name': SearchDatastore_Task, 'duration_secs': 0.018891} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.504020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.504020] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.504020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.504020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.504020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 798.507288] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5f3022-2275-4a39-b14e-40ef1c7c0a10 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.519393] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522e77b2-3b8a-7b5f-1d2d-0b9b65dd60e8, 'name': SearchDatastore_Task, 'duration_secs': 0.008147} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.524472] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 798.524472] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52816a35-0b06-18a6-66b5-5ce91ae96f70" [ 798.524472] env[61898]: _type = "Task" [ 798.524472] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.524751] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ec62730-3403-4395-b3cf-bfbd641101a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.535366] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 798.535366] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5251333e-ce06-8799-d543-7e118b66ed72" [ 798.535366] env[61898]: _type = "Task" [ 798.535366] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.543560] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52816a35-0b06-18a6-66b5-5ce91ae96f70, 'name': SearchDatastore_Task, 'duration_secs': 0.008713} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.547828] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.548104] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.548723] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.548723] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240617, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.555491] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5251333e-ce06-8799-d543-7e118b66ed72, 'name': SearchDatastore_Task, 'duration_secs': 0.009714} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.555780] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 798.555991] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.556596] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.556596] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.556821] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07f10255-4469-4760-9d76-7a8360b66fbe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.558933] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e805972d-3684-426b-8d4c-6a7e0e76b0d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.569606] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 798.569606] env[61898]: value = "task-1240618" [ 798.569606] env[61898]: _type = "Task" [ 798.569606] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.574288] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.576017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.576017] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a612a2-ff5f-47b8-ba3c-9a0fc19b2a68 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.583777] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240618, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.588329] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 798.588329] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52fdc288-cac3-53ed-4036-0bc82938e9e7" [ 798.588329] env[61898]: _type = "Task" [ 798.588329] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.597896] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fdc288-cac3-53ed-4036-0bc82938e9e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.617522] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "52a584e1-61ae-447d-90e0-e15d32a96314" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.619633] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.619633] env[61898]: DEBUG nova.compute.manager [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 798.619633] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d8dfd4-ddca-459b-9c52-ae388b30a37c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.628192] env[61898]: DEBUG nova.compute.manager [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 
1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 798.628900] env[61898]: DEBUG nova.objects.instance [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lazy-loading 'flavor' on Instance uuid 52a584e1-61ae-447d-90e0-e15d32a96314 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.685365] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.685605] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.692047] env[61898]: DEBUG nova.network.neutron [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Updated VIF entry in instance network info cache for port c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 798.692047] env[61898]: DEBUG nova.network.neutron [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Updating instance_info_cache with network_info: [{"id": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "address": "fa:16:3e:95:f9:68", "network": {"id": "3e2ea5da-f899-4f6b-814b-93c4bab1f489", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-925043666-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "181c02c948844fa79ebc2d50a2de8d38", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d70692eb-97b3-417c-a4ca-1ee888246ad9", "external-id": "nsx-vlan-transportzone-342", "segmentation_id": 342, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3bb7681-34", "ovs_interfaceid": "c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.928201] env[61898]: DEBUG nova.compute.utils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.929676] 
env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 798.929818] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 798.986773] env[61898]: DEBUG nova.policy [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e74724b381542e0be0664c9256ecc3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6198f817d1b471483500fe05c9bef3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 799.050360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "eda63357-6749-4652-914a-dc5b69163eb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.051164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.057520] env[61898]: DEBUG oslo_vmware.api [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240617, 'name': PowerOnVM_Task, 'duration_secs': 0.680473} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.058105] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.058358] env[61898]: INFO nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 8.76 seconds to spawn the instance on the hypervisor. 
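Editor's note: the PowerOnVM_Task and SearchDatastore_Task records above all follow the same oslo.vmware pattern: the driver invokes a vCenter method through the API session, gets back a Task reference, and wait_for_task() (the "Waiting for the task" lines at api.py:397) polls it, emitting the "_poll_task ... progress is N%" lines (api.py:434) until the "completed successfully" message (api.py:444). A minimal sketch of that pattern follows, assuming the public oslo.vmware API; the vCenter host, credentials, and VM managed object reference are placeholders, not values from this log.

# Illustrative sketch of the oslo.vmware task-polling pattern visible above.
# Host, credentials and the VM moref are placeholders; only the API shape
# (VMwareAPISession, invoke_api, wait_for_task) is taken from oslo.vmware.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org',          # placeholder vCenter host
    'administrator@vsphere.local',  # placeholder username
    'secret',                       # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
# invoke_api() returns a Task managed object reference (here a PowerOnVM_Task);
# wait_for_task() polls it, logging progress, until it succeeds or raises.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)

The intermediate "progress is 88%" lines report the TaskInfo progress returned by vCenter while wait_for_task() is polling.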
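Likewise, the "Acquiring lock" / "Acquired lock" / "Releasing lock" records around the [datastore2] devstack-image-cache_base entries (lockutils.py:310/313/331), and the 'acquired by ... :: waited' / '"released" by ... :: held' pairs (lockutils.py:407/421), come from oslo.concurrency, which Nova uses to serialize work such as populating the shared image cache or claiming compute_resources. A minimal sketch of both helpers, assuming the public oslo.concurrency API; the guarded work is a placeholder.

# Illustrative sketch of the oslo.concurrency locking seen in the log; the lock
# name is copied from the records above, the guarded work is a placeholder.
from oslo_concurrency import lockutils

CACHE_LOCK = '[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d'

def fetch_image_if_missing():
    # The context manager logs "Acquiring lock" / "Acquired lock" on entry and
    # "Releasing lock" on exit, matching lockutils.py:310/313/331 above.
    with lockutils.lock(CACHE_LOCK):
        pass  # placeholder: check the datastore cache, copy the VMDK if absent

# The decorator form produces the 'acquired by ... :: waited' and
# 'released by ... :: held' pairs seen for locks such as "compute_resources".
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # placeholder: claim CPU, memory and disk on the resource tracker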
[ 799.058549] env[61898]: DEBUG nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 799.059535] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bbac4f-1ca4-46e6-88f1-d64490d0bc76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.087279] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490044} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.088733] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 799.089054] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.091807] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af99cee7-a221-4367-8b27-0f30c6eab726 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.098021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.098021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.107070] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fdc288-cac3-53ed-4036-0bc82938e9e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009596} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.109477] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 799.109477] env[61898]: value = "task-1240619" [ 799.109477] env[61898]: _type = "Task" [ 799.109477] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.109679] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b621383-e479-46f2-92f3-0b2bcb7f75b1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.119647] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 799.119647] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52560e76-8623-d95d-8287-234280968c69" [ 799.119647] env[61898]: _type = "Task" [ 799.119647] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.126608] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.140484] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52560e76-8623-d95d-8287-234280968c69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.178038] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Successfully updated port: 536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.189850] env[61898]: DEBUG nova.compute.utils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.194502] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fba49d1-2afb-4a09-a85d-c6995c89a096 req-d1cbaa70-c963-4920-b0f9-241b0a3f0d96 service nova] Releasing lock "refresh_cache-b709df92-bf56-40ed-ba48-a8fa19be8b68" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.316668] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Successfully created port: cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.322558] env[61898]: DEBUG nova.compute.manager [req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received event network-vif-plugged-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 799.322769] env[61898]: DEBUG oslo_concurrency.lockutils [req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.323156] env[61898]: DEBUG oslo_concurrency.lockutils [req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.323156] env[61898]: DEBUG oslo_concurrency.lockutils [req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.323331] env[61898]: DEBUG nova.compute.manager [req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] No waiting events found dispatching network-vif-plugged-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 799.323476] env[61898]: WARNING nova.compute.manager 
[req-af7f052f-584e-455b-a6ff-120bf2fbe2d0 req-107c9e35-70bf-4dde-a270-d1c7562589f7 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received unexpected event network-vif-plugged-536edbc0-179d-441d-8f00-c0a46d9589e2 for instance with vm_state building and task_state spawning. [ 799.434733] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 799.560625] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 799.596017] env[61898]: INFO nova.compute.manager [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 33.34 seconds to build instance. [ 799.629796] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140449} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.632590] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 799.635093] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc25d266-7c70-4942-8445-1cdcf9642034 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.641692] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52560e76-8623-d95d-8287-234280968c69, 'name': SearchDatastore_Task, 'duration_secs': 0.024075} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.642448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.642854] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] d6c96dce-13ae-411a-b52a-fee484718a8a/d6c96dce-13ae-411a-b52a-fee484718a8a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.643774] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.644225] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.644669] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.645098] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7167c761-9513-4347-8b61-0a5f85bfe921 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.657528] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a263556-e3de-4655-bd49-5dd54a3036b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.659387] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adf3d6c4-9dbf-4c44-be30-45ecac377298 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.670898] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
799.675143] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d0d700d-42f5-4a95-9ea5-580a36c51d01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.695552] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.695710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.696084] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.697662] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.698476] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 799.698476] env[61898]: value = "task-1240620" [ 799.698476] env[61898]: _type = "Task" [ 799.698476] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.709247] env[61898]: DEBUG oslo_vmware.api [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 799.709247] env[61898]: value = "task-1240621" [ 799.709247] env[61898]: _type = "Task" [ 799.709247] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.720942] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.725678] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 799.725678] env[61898]: value = "task-1240622" [ 799.725678] env[61898]: _type = "Task" [ 799.725678] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.726230] env[61898]: DEBUG oslo_vmware.api [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.726500] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.726702] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.732643] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41fc72f6-79c6-4b3d-9868-4f6763c72c88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.741686] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 799.741686] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526dacb8-71d2-5f6c-d434-f5c748f80631" [ 799.741686] env[61898]: _type = "Task" [ 799.741686] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.744902] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.761577] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]526dacb8-71d2-5f6c-d434-f5c748f80631, 'name': SearchDatastore_Task, 'duration_secs': 0.016675} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.766662] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec286257-6be8-4eed-9dea-15116a4300bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.773261] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 799.773261] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5224b8db-2fae-6d04-8feb-e5a47faf74a7" [ 799.773261] env[61898]: _type = "Task" [ 799.773261] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.786082] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224b8db-2fae-6d04-8feb-e5a47faf74a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.896076] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd248822-702d-467a-b94c-7207f3a4aa88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.907025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24eec114-8043-4d46-852f-d1dd697ceda1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.942492] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738a3b74-a1aa-40e8-be51-d1d076594efa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.956682] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e52fdc2-9af3-4423-bde7-f0010274d4fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.973805] env[61898]: DEBUG nova.compute.provider_tree [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.086899] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.095742] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b157d4f5-42fd-486a-b5e4-faa06aff0051 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.157s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.217357] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240620, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.223612] env[61898]: DEBUG oslo_vmware.api [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240621, 'name': PowerOffVM_Task, 'duration_secs': 0.353465} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.223987] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 800.224344] env[61898]: DEBUG nova.compute.manager [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 800.227062] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7df2640-c85a-472e-bcf4-2f0b7dcf985e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.242929] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240622, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.280653] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.292047] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5224b8db-2fae-6d04-8feb-e5a47faf74a7, 'name': SearchDatastore_Task, 'duration_secs': 0.011755} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.292373] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.292634] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b709df92-bf56-40ed-ba48-a8fa19be8b68/b709df92-bf56-40ed-ba48-a8fa19be8b68.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 800.292892] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e89ac70-df00-4000-97d1-b14e5dc6244a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.301418] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 800.301418] env[61898]: value = "task-1240623" [ 800.301418] env[61898]: _type = "Task" [ 800.301418] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.314253] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.452635] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 800.476696] env[61898]: DEBUG nova.scheduler.client.report [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.484404] env[61898]: DEBUG nova.virt.hardware [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.485453] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e2b04f-f798-43b0-a756-4850fccd6e7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.496290] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6b6c3a-de7e-4dab-9967-5d8ef422291f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.598559] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 800.609806] env[61898]: DEBUG nova.network.neutron [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updating instance_info_cache with network_info: [{"id": "536edbc0-179d-441d-8f00-c0a46d9589e2", "address": "fa:16:3e:dd:ed:cc", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536edbc0-17", "ovs_interfaceid": "536edbc0-179d-441d-8f00-c0a46d9589e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.715847] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556814} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.716154] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] d6c96dce-13ae-411a-b52a-fee484718a8a/d6c96dce-13ae-411a-b52a-fee484718a8a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.719071] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.719071] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f73bdc3b-e99b-4df4-b27d-53410d8163df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.727792] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 800.727792] env[61898]: value = "task-1240624" [ 800.727792] env[61898]: _type = "Task" [ 800.727792] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.739167] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.746246] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240622, 'name': ReconfigVM_Task, 'duration_secs': 0.64675} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.746543] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Reconfigured VM instance instance-0000003e to attach disk [datastore2] b106ab9e-08d4-4d18-90e0-13a071c9efb1/b106ab9e-08d4-4d18-90e0-13a071c9efb1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 800.749274] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67094f7c-1704-444c-897c-6c9af8f7b62c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.753150] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3101b777-1370-4083-a473-1f60e8d3af55 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.135s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.757957] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 800.757957] env[61898]: value = "task-1240625" [ 800.757957] env[61898]: _type = "Task" [ 800.757957] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.766373] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240625, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.814028] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240623, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.826990] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.827337] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.827603] env[61898]: INFO nova.compute.manager [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Attaching volume bcbf2abe-9a1d-4f11-b843-5d7210e16392 to /dev/sdb [ 800.886847] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1032dde9-c675-424b-8bb0-a77d05f4305d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.897098] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5123874-2ecf-4d42-85b4-46052605c396 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.916966] env[61898]: DEBUG nova.virt.block_device [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating existing volume attachment record: 8264079d-af4f-42d7-8f81-aa05750478c6 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 800.922499] env[61898]: DEBUG nova.compute.manager [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 800.922784] env[61898]: DEBUG oslo_concurrency.lockutils [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e service nova] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.923080] env[61898]: DEBUG oslo_concurrency.lockutils [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e service nova] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.923698] env[61898]: DEBUG oslo_concurrency.lockutils [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e 
service nova] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.923698] env[61898]: DEBUG nova.compute.manager [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] No waiting events found dispatching network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 800.924384] env[61898]: WARNING nova.compute.manager [req-4de14693-4cf1-46d6-90a9-fb175cf8577c req-819ab15e-9c12-4c64-a3ec-50b26301211e service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received unexpected event network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb for instance with vm_state building and task_state spawning. [ 800.977392] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Successfully updated port: cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 800.990138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.990572] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Start building networks asynchronously for instance. 
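The Acquiring lock "compute_resources" / acquired ... waited 18.320s / "released" ... held 2.564s triplets above come from oslo.concurrency's lock helpers. A minimal sketch of the same pattern, assuming a made-up critical section (claim_resources is hypothetical, not Nova's ResourceTracker):

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, vcpus, memory_mb):
    # Runs with the named semaphore held; the decorator emits the
    # 'acquired ... waited Ns' / '"released" ... held Ns' DEBUG lines seen above.
    return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}

claim_resources("hypothetical-uuid", 1, 192)

# Context-manager form, which produces the Acquiring/Acquired/Releasing lock
# lines (lockutils.py:310/313/331) used around the refresh_cache-* locks:
with lockutils.lock("refresh_cache-hypothetical-uuid"):
    pass  # read or refresh a shared cache entry here

Long waited/held times simply mean another request was holding the same named lock, as with the 71.157s build lock released earlier in this log.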
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 800.996218] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.320s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.998465] env[61898]: INFO nova.compute.claims [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.115018] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.115018] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance network_info: |[{"id": "536edbc0-179d-441d-8f00-c0a46d9589e2", "address": "fa:16:3e:dd:ed:cc", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536edbc0-17", "ovs_interfaceid": "536edbc0-179d-441d-8f00-c0a46d9589e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 801.115437] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:ed:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '536edbc0-179d-441d-8f00-c0a46d9589e2', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 801.124586] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] Creating folder: Project (5c2e835a924c438287e7626c34c2fb05). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 801.124921] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-411c94ad-5732-4e9f-afe3-4ea3b3b6e551 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.130067] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.137199] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created folder: Project (5c2e835a924c438287e7626c34c2fb05) in parent group-v267550. [ 801.137485] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating folder: Instances. Parent ref: group-v267631. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 801.137818] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4f5562d-d083-46cd-a373-e49d984477e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.149848] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created folder: Instances in parent group-v267631. [ 801.150202] env[61898]: DEBUG oslo.service.loopingcall [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 801.150389] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 801.150602] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-597f8ffb-fae1-4881-8fa8-c98c905f2a7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.173218] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.173218] env[61898]: value = "task-1240629" [ 801.173218] env[61898]: _type = "Task" [ 801.173218] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.182560] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240629, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.236461] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.155528} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.236746] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.237558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9595d94-b96c-4c09-9bfc-6737ee07b490 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.266428] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] d6c96dce-13ae-411a-b52a-fee484718a8a/d6c96dce-13ae-411a-b52a-fee484718a8a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.267596] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03448626-0a91-4355-b19d-e9d84134a4f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.293234] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240625, 'name': Rename_Task, 'duration_secs': 0.22195} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.294648] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.294978] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 801.294978] env[61898]: value = "task-1240631" [ 801.294978] env[61898]: _type = "Task" [ 801.294978] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.295206] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e56202c0-3b91-4451-a89d-82defb3b8285 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.308333] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240631, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.313857] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 801.313857] env[61898]: value = "task-1240633" [ 801.313857] env[61898]: _type = "Task" [ 801.313857] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.320881] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549615} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.321708] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b709df92-bf56-40ed-ba48-a8fa19be8b68/b709df92-bf56-40ed-ba48-a8fa19be8b68.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.322036] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.322377] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e49a3e5-07bd-4995-b3ef-5f36dec91eeb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.329028] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240633, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.336598] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 801.336598] env[61898]: value = "task-1240634" [ 801.336598] env[61898]: _type = "Task" [ 801.336598] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.347781] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240634, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.358163] env[61898]: DEBUG nova.compute.manager [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received event network-changed-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 801.358408] env[61898]: DEBUG nova.compute.manager [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Refreshing instance network info cache due to event network-changed-536edbc0-179d-441d-8f00-c0a46d9589e2. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 801.358629] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Acquiring lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.358783] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Acquired lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.358975] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Refreshing network info cache for port 536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.482338] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.482338] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.482644] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.497850] env[61898]: DEBUG nova.compute.utils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 
tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.501763] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 801.501950] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 801.564021] env[61898]: DEBUG nova.policy [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b766eccf77349cab3c468606a183d0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c406fb9a02748bfa2f24158ec5d6272', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 801.686372] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240629, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.813519] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240631, 'name': ReconfigVM_Task, 'duration_secs': 0.439451} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.813519] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfigured VM instance instance-00000042 to attach disk [datastore2] d6c96dce-13ae-411a-b52a-fee484718a8a/d6c96dce-13ae-411a-b52a-fee484718a8a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.813519] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b5c9e2f-c4a9-4dce-b3c7-7bb341126cc9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.827295] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240633, 'name': PowerOnVM_Task} progress is 100%. 
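The recurring Task: {'id': task-..., 'name': ...} progress is N% entries are oslo.vmware's task-polling loop (wait_for_task / _poll_task). A rough sketch of driving a single vCenter task this way; the host, credentials and object lookup below are placeholders, not values from this log, and this is not the exact Nova call path:

from oslo_vmware import api, vim_util

# Hypothetical connection details.
session = api.VMwareAPISession(
    "vcenter.example.org", "administrator@vsphere.local", "secret",
    api_retry_count=3, task_poll_interval=0.5)

# Pick some VM reference to act on (purely illustrative).
result = session.invoke_api(vim_util, "get_objects", session.vim,
                            "VirtualMachine", 1)
vm_ref = result.objects[0].obj

# invoke_api() issues the SOAP request (the "Invoking VirtualMachine.PowerOnVM_Task
# with opID=..." lines); wait_for_task() then polls the returned task, logging
# "progress is N%" until it completes successfully or raises on failure.
task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
session.wait_for_task(task)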
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.828957] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 801.828957] env[61898]: value = "task-1240635" [ 801.828957] env[61898]: _type = "Task" [ 801.828957] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.838551] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240635, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.854060] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091841} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.854348] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.855253] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee699f9e-fe8f-4ff0-9038-2eaae8ce8ce0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.886160] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] b709df92-bf56-40ed-ba48-a8fa19be8b68/b709df92-bf56-40ed-ba48-a8fa19be8b68.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.887931] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43d52a12-85be-40cd-afa3-3078c0d00043 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.905800] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Successfully created port: 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 801.909867] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 801.909867] env[61898]: value = "task-1240636" [ 801.909867] env[61898]: _type = "Task" [ 801.909867] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.919440] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240636, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.004964] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 802.034010] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.193187] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240629, 'name': CreateVM_Task, 'duration_secs': 0.517721} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.193187] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 802.194254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.194254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.194476] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 802.197479] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4261da16-7c6f-47d5-82aa-ae4deadb7787 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.205596] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 802.205596] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52361ffb-3052-f8f3-e7c9-312bfb9a8fc9" [ 802.205596] 
env[61898]: _type = "Task" [ 802.205596] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.216144] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52361ffb-3052-f8f3-e7c9-312bfb9a8fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.279658] env[61898]: DEBUG nova.network.neutron [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.305264] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updated VIF entry in instance network info cache for port 536edbc0-179d-441d-8f00-c0a46d9589e2. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 802.305264] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updating instance_info_cache with network_info: [{"id": "536edbc0-179d-441d-8f00-c0a46d9589e2", "address": "fa:16:3e:dd:ed:cc", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536edbc0-17", "ovs_interfaceid": "536edbc0-179d-441d-8f00-c0a46d9589e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.333376] env[61898]: DEBUG oslo_vmware.api [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240633, 'name': PowerOnVM_Task, 'duration_secs': 0.545438} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.336927] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.337140] env[61898]: DEBUG nova.compute.manager [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 802.338166] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-204c2224-a99a-4f28-ad4e-2d31511b62d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.356132] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240635, 'name': Rename_Task, 'duration_secs': 0.36402} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.357415] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.357415] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6229d105-e8db-4986-9f2e-d66ac22d6729 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.366389] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 802.366389] env[61898]: value = "task-1240637" [ 802.366389] env[61898]: _type = "Task" [ 802.366389] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.377669] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.421728] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240636, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.450029] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81910137-587d-4e8d-823e-92b57da0cad5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.459809] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f62925-9091-4c82-afbf-22ad1cb21b73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.506477] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728bde5e-8e16-4d67-a883-aec1b7489b43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.522110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58962c14-1642-4ba0-b43f-60cbf4e434b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.542852] env[61898]: DEBUG nova.compute.provider_tree [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.717348] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52361ffb-3052-f8f3-e7c9-312bfb9a8fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.020711} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.717348] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.717711] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.717711] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.717796] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.717900] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.718195] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17c0cdab-442d-4b92-82ec-81a823dcf60d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.727820] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.728009] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.728702] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f975633-bf14-4b3a-9cc5-f0c865b6fc3c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.734519] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 802.734519] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525ad662-fb55-0428-f635-445f7dd655cf" [ 802.734519] env[61898]: _type = "Task" [ 802.734519] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.741598] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ad662-fb55-0428-f635-445f7dd655cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.785070] env[61898]: DEBUG nova.compute.manager [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 802.785591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.785883] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance network_info: |[{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2002}} [ 802.786809] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-547bb09b-c83d-41e0-b875-cfb1a05e4cec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.789706] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:f5:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd3bd232-226d-4ac0-a9f8-17b93aca92fb', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.797019] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating folder: Project (a6198f817d1b471483500fe05c9bef3f). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.797206] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64a9d66b-4443-4973-b074-67dc8fcb4dce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.807082] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created folder: Project (a6198f817d1b471483500fe05c9bef3f) in parent group-v267550. [ 802.807231] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating folder: Instances. Parent ref: group-v267636. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 802.807536] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Releasing lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.807756] env[61898]: DEBUG nova.compute.manager [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Received event network-changed-9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 802.807917] env[61898]: DEBUG nova.compute.manager [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Refreshing instance network info cache due to event network-changed-9f33f2c4-4626-4230-90ea-e91c5f0da486. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 802.808160] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.808310] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.808773] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Refreshing network info cache for port 9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.809490] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5deef403-bb23-4fe4-a6ec-8214dc3bf7eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.818697] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created folder: Instances in parent group-v267636. [ 802.818950] env[61898]: DEBUG oslo.service.loopingcall [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.819739] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.819964] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7677878a-7789-465f-ace2-214d419bee4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.839766] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.839766] env[61898]: value = "task-1240640" [ 802.839766] env[61898]: _type = "Task" [ 802.839766] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.847403] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240640, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.867721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.876353] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240637, 'name': PowerOnVM_Task} progress is 81%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.921053] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240636, 'name': ReconfigVM_Task, 'duration_secs': 0.776648} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.921053] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Reconfigured VM instance instance-00000043 to attach disk [datastore2] b709df92-bf56-40ed-ba48-a8fa19be8b68/b709df92-bf56-40ed-ba48-a8fa19be8b68.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.921724] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-771f51c2-b9f7-4db7-9fa5-e6239748addc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.928396] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 802.928396] env[61898]: value = "task-1240641" [ 802.928396] env[61898]: _type = "Task" [ 802.928396] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.936184] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240641, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.949122] env[61898]: DEBUG nova.compute.manager [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 802.949322] env[61898]: DEBUG nova.compute.manager [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing instance network info cache due to event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 802.950698] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.950908] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.951137] env[61898]: DEBUG nova.network.neutron [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 803.022397] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 803.046284] env[61898]: DEBUG nova.scheduler.client.report [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 803.052932] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 803.053216] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 
tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 803.053376] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.053569] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 803.053709] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.053856] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 803.054077] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 803.054242] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 803.054414] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 803.054573] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 803.055025] env[61898]: DEBUG nova.virt.hardware [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 803.055651] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50193988-cd55-4fde-ab5c-6608df8f01da {{(pid=61898) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.066303] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b8bb33-0161-4cfb-9a21-84fd92f79393 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.246465] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ad662-fb55-0428-f635-445f7dd655cf, 'name': SearchDatastore_Task, 'duration_secs': 0.040083} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.247318] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cac86578-845e-46e1-8240-cd0fbc7af24d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.253085] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 803.253085] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5c45d-eb08-ca7e-dcc3-410f443585f0" [ 803.253085] env[61898]: _type = "Task" [ 803.253085] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.263302] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5c45d-eb08-ca7e-dcc3-410f443585f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.307821] env[61898]: INFO nova.compute.manager [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] instance snapshotting [ 803.308410] env[61898]: WARNING nova.compute.manager [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 803.311402] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0392280-7c07-4b24-95c5-c4f1b0671063 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.332968] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3329e7f5-5ddd-4fe4-8da9-73e4cf6bcc61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.350573] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240640, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.375968] env[61898]: DEBUG oslo_vmware.api [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240637, 'name': PowerOnVM_Task, 'duration_secs': 0.870409} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.376378] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.376562] env[61898]: INFO nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Took 10.43 seconds to spawn the instance on the hypervisor. [ 803.376787] env[61898]: DEBUG nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 803.377613] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8c9dc6-475d-494b-94d6-750badfe7e7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.439412] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240641, 'name': Rename_Task, 'duration_secs': 0.225881} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.439862] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.439932] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1779bc8f-5855-40e6-9ce6-f0c190fed674 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.447856] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 803.447856] env[61898]: value = "task-1240643" [ 803.447856] env[61898]: _type = "Task" [ 803.447856] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.462829] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240643, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.560483] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.561068] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 803.567014] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.293s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.567305] env[61898]: DEBUG nova.objects.instance [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lazy-loading 'resources' on Instance uuid 29eadea9-fa85-4f51-97d0-a941e1658094 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 803.667417] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Successfully updated port: 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 803.730526] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updated VIF entry in instance network info cache for port 9f33f2c4-4626-4230-90ea-e91c5f0da486. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.730986] env[61898]: DEBUG nova.network.neutron [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.765457] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5c45d-eb08-ca7e-dcc3-410f443585f0, 'name': SearchDatastore_Task, 'duration_secs': 0.014353} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.773348] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.773348] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 4db53fdf-7107-43c5-a57c-65d54b807909/4db53fdf-7107-43c5-a57c-65d54b807909.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.777317] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4218dfbf-e5f2-4633-a3f0-bca09545e620 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.785846] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 803.785846] env[61898]: value = "task-1240644" [ 803.785846] env[61898]: _type = "Task" [ 803.785846] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.796980] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240644, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.822467] env[61898]: DEBUG nova.network.neutron [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updated VIF entry in instance network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 803.822888] env[61898]: DEBUG nova.network.neutron [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.846902] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 803.847199] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d8fba5d2-b5fe-4511-ab53-4ad7df98cf8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.854276] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240640, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.855676] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 803.855676] env[61898]: value = "task-1240645" [ 803.855676] env[61898]: _type = "Task" [ 803.855676] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.863444] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240645, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.900312] env[61898]: INFO nova.compute.manager [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Took 33.41 seconds to build instance. [ 803.957226] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240643, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.071029] env[61898]: DEBUG nova.compute.utils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 804.075305] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 804.075545] env[61898]: DEBUG nova.network.neutron [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 804.131765] env[61898]: DEBUG nova.policy [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a2508b1f3f945459495cef52abefedb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '819c8a7ff0aa4d7186bd859e4b56d16e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 804.179220] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.179436] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.179650] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 
626caecc-6389-4064-aafd-9968cee262ee] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 804.235683] env[61898]: DEBUG oslo_concurrency.lockutils [req-0ba05c5d-5302-4cfa-9f5d-8a19176e3f60 req-014112fa-584c-43b5-95b8-466f6ca36a21 service nova] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.279510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.279510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.279510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.279510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.279510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.284638] env[61898]: INFO nova.compute.manager [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Terminating instance [ 804.302281] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240644, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.327461] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d5d57bd-6dac-4780-b942-a2aa65cea0ff req-e1ab1c3d-f830-4158-9470-7c1cbc357448 service nova] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.357173] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240640, 'name': CreateVM_Task, 'duration_secs': 1.058695} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.363272] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 804.365114] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.365114] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.365302] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 804.366112] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92165ef4-b741-443c-ace3-5f55430e7d9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.372762] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240645, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.379042] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 804.379042] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52259feb-7eb9-6f60-e434-997fa1f314c8" [ 804.379042] env[61898]: _type = "Task" [ 804.379042] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.389213] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52259feb-7eb9-6f60-e434-997fa1f314c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.403248] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9b7f706c-23a8-4699-9cc9-d0375b6d3c73 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.960s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.469379] env[61898]: DEBUG oslo_vmware.api [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240643, 'name': PowerOnVM_Task, 'duration_secs': 0.552887} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.471053] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.471312] env[61898]: INFO nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Took 9.06 seconds to spawn the instance on the hypervisor. 
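
The PowerOnVM_Task entries above show the task-handling pattern the vmwareapi driver relies on throughout this log: a SOAP call returns a task managed object, and the oslo.vmware session then polls it (the repeated "progress is N%" _poll_task records) until it reports completion, after which nova logs "Powered on the VM". A minimal sketch of that pattern is below; it assumes a reachable vCenter, and the endpoint, credentials, and VM lookup are placeholders, not values taken from this log.

```python
# Illustrative sketch of the invoke_api / wait_for_task pattern behind the
# PowerOnVM_Task polling lines above. The vCenter endpoint, credentials and
# the way the VM reference is obtained are placeholders, not from this log.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    host='vcenter.example.test',        # placeholder endpoint
    server_username='administrator',    # placeholder credentials
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5,             # cadence of the "_poll_task" DEBUG lines
)

# Fetch one VirtualMachine managed-object reference (placeholder lookup).
retrieve_result = session.invoke_api(vim_util, 'get_objects',
                                     session.vim, 'VirtualMachine', 1)
vm_ref = retrieve_result.objects[0].obj

# Kick off the power-on task, then block while the session polls it.
# Each poll emits a "Task: {...} progress is N%" DEBUG record; on success
# the log shows "completed successfully", and wait_for_task raises if the
# task ends in an error state.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)
```

The same invoke/poll sequence accounts for the ReconfigVM_Task, CreateVM_Task, Rename_Task, SearchDatastore_Task and CopyVirtualDisk_Task entries elsewhere in this section; only the method name and arguments differ.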
[ 804.471496] env[61898]: DEBUG nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 804.472752] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2800da-0d31-4f5c-9eb3-d8fa5a823b21 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.486782] env[61898]: DEBUG nova.network.neutron [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Successfully created port: fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 804.499696] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d798464-086c-4eb5-94f2-139624347cc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.507547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae3fd23-2987-42b0-a9d9-1bc394ee74e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.543261] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcc556e-b451-451c-96dc-3e64d5b73420 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.551973] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495cb617-4afd-4504-b508-fa41b9673a53 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.567153] env[61898]: DEBUG nova.compute.provider_tree [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.576200] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 804.718334] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.798879] env[61898]: DEBUG nova.compute.manager [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 804.799167] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.800407] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c579acee-e98b-4f0a-bc7a-8c586e87fa9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.813064] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240644, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.593074} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.814709] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 4db53fdf-7107-43c5-a57c-65d54b807909/4db53fdf-7107-43c5-a57c-65d54b807909.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.814709] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.814709] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e33542a1-5548-448d-aef9-aa19bc041be2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.818693] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.819300] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ab7ed23-f8b1-4140-97d1-4ca6e9af4d48 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.823069] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 804.823069] env[61898]: value = "task-1240646" [ 804.823069] env[61898]: _type = "Task" [ 804.823069] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.827970] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 804.827970] env[61898]: value = "task-1240647" [ 804.827970] env[61898]: _type = "Task" [ 804.827970] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.835838] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240646, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.841286] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.867077] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240645, 'name': CreateSnapshot_Task, 'duration_secs': 0.750738} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.867400] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 804.868231] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d552ae91-7921-4fc5-a403-0ff4f029b56e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.886993] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.887276] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.894674] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': 
session[52794197-29f0-ea69-2b8e-12812988d1d1]52259feb-7eb9-6f60-e434-997fa1f314c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015051} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.895236] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.895497] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.895793] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.896063] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.896161] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.896449] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d630700f-78b3-4767-b55a-3383fff7f6af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.906184] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.906388] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 804.907492] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88654fc9-ea53-466d-9336-1026c385719c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.913684] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 804.913684] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520f3041-130f-9899-ee04-23a9a5d66904" [ 804.913684] env[61898]: _type = "Task" [ 804.913684] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.923726] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520f3041-130f-9899-ee04-23a9a5d66904, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.999587] env[61898]: INFO nova.compute.manager [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Took 32.88 seconds to build instance. [ 805.073811] env[61898]: DEBUG nova.scheduler.client.report [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 805.151262] env[61898]: DEBUG nova.compute.manager [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received event network-vif-plugged-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 805.151663] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Acquiring lock "626caecc-6389-4064-aafd-9968cee262ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.152018] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Lock "626caecc-6389-4064-aafd-9968cee262ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
805.152167] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Lock "626caecc-6389-4064-aafd-9968cee262ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.152357] env[61898]: DEBUG nova.compute.manager [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] No waiting events found dispatching network-vif-plugged-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.152557] env[61898]: WARNING nova.compute.manager [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received unexpected event network-vif-plugged-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a for instance with vm_state building and task_state spawning. [ 805.152748] env[61898]: DEBUG nova.compute.manager [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 805.152937] env[61898]: DEBUG nova.compute.manager [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing instance network info cache due to event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 805.153155] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Acquiring lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.184768] env[61898]: DEBUG nova.network.neutron [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updating instance_info_cache with network_info: [{"id": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "address": "fa:16:3e:ab:76:29", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ae0702-26", "ovs_interfaceid": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.336187] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073728} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.336187] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.337054] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14f063f-7b4f-4135-b4c9-a499742245ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.343283] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240647, 'name': PowerOffVM_Task, 'duration_secs': 0.452503} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.343938] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.344091] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.344351] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d08f0d76-3c79-40ad-877e-cb63492220b1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.364317] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 4db53fdf-7107-43c5-a57c-65d54b807909/4db53fdf-7107-43c5-a57c-65d54b807909.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.364317] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e151d25-55ab-48e2-8945-589ece0c2e8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.386923] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 805.388582] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-0129f9c2-1e5d-4462-81f1-3f5a6bd0f4fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.391619] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 805.391619] env[61898]: value = "task-1240649" [ 805.391619] env[61898]: _type = "Task" [ 805.391619] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.396170] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 805.398907] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 805.398907] env[61898]: value = "task-1240650" [ 805.398907] env[61898]: _type = "Task" [ 805.398907] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.405770] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.411697] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240650, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.429282] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520f3041-130f-9899-ee04-23a9a5d66904, 'name': SearchDatastore_Task, 'duration_secs': 0.028255} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.430192] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e0823df-ac38-4b44-8227-191fd57b2070 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.434320] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 805.434527] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 805.435026] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore2] b106ab9e-08d4-4d18-90e0-13a071c9efb1 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.435592] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-806dbd55-092d-4a45-9c20-e7d53b2e0214 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.439259] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 805.439259] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520b94c0-20ae-95bc-426e-44f3d1a206a9" [ 805.439259] env[61898]: _type = "Task" [ 805.439259] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.444651] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 805.444651] env[61898]: value = "task-1240651" [ 805.444651] env[61898]: _type = "Task" [ 805.444651] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.452542] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520b94c0-20ae-95bc-426e-44f3d1a206a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.457608] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.472870] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Volume attach. 
Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 805.473151] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267635', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'name': 'volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1fb4535d-47d8-45c5-b6d6-d05e57237b98', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'serial': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 805.474027] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bdb869-26f0-4982-934a-c2715381d4a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.491393] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a412b7ef-5c34-4380-99b8-8cff053bf7a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.509450] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b9323471-e322-4e73-bf26-85e124d4ba3a tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.916s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.517424] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392/volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 805.517782] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de781310-8d8e-4b48-8b31-438e47e876ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.536357] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 805.536357] env[61898]: value = "task-1240652" [ 805.536357] env[61898]: _type = "Task" [ 805.536357] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.545013] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240652, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.581035] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.582844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.175s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.583112] env[61898]: DEBUG nova.objects.instance [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lazy-loading 'resources' on Instance uuid a0580308-d25b-47cb-9c1c-adb763be7925 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.589136] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 805.610614] env[61898]: INFO nova.scheduler.client.report [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted allocations for instance 29eadea9-fa85-4f51-97d0-a941e1658094 [ 805.620043] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.620212] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.620437] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 
tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.620525] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.620678] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.620827] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.621110] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.621298] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.621491] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.621667] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.621843] env[61898]: DEBUG nova.virt.hardware [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.623889] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b721d1cf-dc10-4a2b-862c-a64af78d76f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.632345] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e001bb-f2ac-4c68-afbc-3df242c65361 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.691026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 
tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.691026] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Instance network_info: |[{"id": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "address": "fa:16:3e:ab:76:29", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ae0702-26", "ovs_interfaceid": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 805.691026] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Acquired lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.691026] env[61898]: DEBUG nova.network.neutron [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 805.691026] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:76:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f016d1-34a6-4ebd-81ed-a6bf9d109b87', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16ae0702-2627-4e8c-a2fc-a0e9d977bd4a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 805.699405] env[61898]: DEBUG oslo.service.loopingcall [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.700834] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 805.701325] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b45c893-83db-407a-8fa3-e854d4cd4515 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.730140] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 805.730140] env[61898]: value = "task-1240653" [ 805.730140] env[61898]: _type = "Task" [ 805.730140] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.740236] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240653, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.909745] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240649, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.916650] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240650, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.927876] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.953801] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520b94c0-20ae-95bc-426e-44f3d1a206a9, 'name': SearchDatastore_Task, 'duration_secs': 0.01358} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.954523] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.954863] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 805.955092] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0a9f37a-9f9d-40f4-b613-28323b53ff1a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.961141] env[61898]: DEBUG oslo_vmware.api [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.332292} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.961886] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.962147] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.962402] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.962654] env[61898]: INFO nova.compute.manager [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 805.962955] env[61898]: DEBUG oslo.service.loopingcall [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.963223] env[61898]: DEBUG nova.compute.manager [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 805.963362] env[61898]: DEBUG nova.network.neutron [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 805.966544] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 805.966544] env[61898]: value = "task-1240654" [ 805.966544] env[61898]: _type = "Task" [ 805.966544] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.975901] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.047867] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240652, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.119510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9c9106a-c976-4ec0-bc54-1355bae3b7f2 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "29eadea9-fa85-4f51-97d0-a941e1658094" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.311s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.245949] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240653, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.403882] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240649, 'name': ReconfigVM_Task, 'duration_secs': 0.571935} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.407893] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 4db53fdf-7107-43c5-a57c-65d54b807909/4db53fdf-7107-43c5-a57c-65d54b807909.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.411898] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b2b9b1d-960c-47e9-b15a-c0d7bdfcb425 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.419914] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240650, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.421819] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 806.421819] env[61898]: value = "task-1240655" [ 806.421819] env[61898]: _type = "Task" [ 806.421819] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.437327] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240655, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.477199] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240654, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.522951] env[61898]: DEBUG nova.compute.manager [req-f465234b-5954-4f53-adbc-73a742ed86a4 req-1f20a3d4-2be8-44b3-84a0-7690266de632 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Received event network-vif-deleted-238d786d-b9f2-4cbb-86ed-2508303aa88f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 806.522951] env[61898]: INFO nova.compute.manager [req-f465234b-5954-4f53-adbc-73a742ed86a4 req-1f20a3d4-2be8-44b3-84a0-7690266de632 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Neutron deleted interface 238d786d-b9f2-4cbb-86ed-2508303aa88f; detaching it from the instance and deleting it from the info cache [ 806.523228] env[61898]: DEBUG nova.network.neutron [req-f465234b-5954-4f53-adbc-73a742ed86a4 req-1f20a3d4-2be8-44b3-84a0-7690266de632 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.551605] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240652, 'name': ReconfigVM_Task, 'duration_secs': 0.542631} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.552133] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfigured VM instance instance-00000034 to attach disk [datastore1] volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392/volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.561215] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c0977c5-303b-4257-877b-2bab55ff01c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.582417] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 806.582417] env[61898]: value = "task-1240656" [ 806.582417] env[61898]: _type = "Task" [ 806.582417] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.591862] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240656, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.593952] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c4227a-9c9c-4aaa-9cd3-df42f9abb66f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.601431] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f271dc10-ebf0-4665-9254-a745962fb880 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.636350] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbad1318-2b0d-4620-87fb-d233eaaf6db2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.640252] env[61898]: DEBUG nova.network.neutron [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updated VIF entry in instance network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 806.640729] env[61898]: DEBUG nova.network.neutron [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updating instance_info_cache with network_info: [{"id": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "address": "fa:16:3e:ab:76:29", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ae0702-26", "ovs_interfaceid": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.647537] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22e41f5-8708-49dd-bad7-49e033409b73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.664287] env[61898]: DEBUG nova.compute.provider_tree [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.703705] env[61898]: DEBUG nova.network.neutron [None 
req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Successfully updated port: fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.742357] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240653, 'name': CreateVM_Task, 'duration_secs': 0.607853} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.742537] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 806.743295] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.744019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.744019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 806.744512] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13b3983d-0d3e-4d22-aa85-6da9406f74fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.749818] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 806.749818] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520bcd9d-f2ad-34ef-a71d-e414cddc236d" [ 806.749818] env[61898]: _type = "Task" [ 806.749818] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.759315] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520bcd9d-f2ad-34ef-a71d-e414cddc236d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.912470] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240650, 'name': CloneVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.933223] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240655, 'name': Rename_Task, 'duration_secs': 0.158494} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.933599] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.934101] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25ff9200-6ce7-4c4a-ab92-7940bf9b558e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.939666] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 806.939666] env[61898]: value = "task-1240657" [ 806.939666] env[61898]: _type = "Task" [ 806.939666] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.947461] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240657, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.968642] env[61898]: DEBUG nova.network.neutron [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.985192] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565706} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.986402] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 806.986507] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 806.986676] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4aaf3df-3e98-4494-9215-aa87011817f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.994414] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 806.994414] env[61898]: value = "task-1240658" [ 806.994414] env[61898]: _type = "Task" [ 806.994414] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.003142] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240658, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.029845] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef73ffa2-ee5f-4ee4-84c2-934401409412 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.038641] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1348fbaa-7d03-402b-bf68-5ab34cfc65ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.073036] env[61898]: DEBUG nova.compute.manager [req-f465234b-5954-4f53-adbc-73a742ed86a4 req-1f20a3d4-2be8-44b3-84a0-7690266de632 service nova] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Detach interface failed, port_id=238d786d-b9f2-4cbb-86ed-2508303aa88f, reason: Instance b106ab9e-08d4-4d18-90e0-13a071c9efb1 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 807.092023] env[61898]: DEBUG oslo_vmware.api [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240656, 'name': ReconfigVM_Task, 'duration_secs': 0.168417} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.092023] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267635', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'name': 'volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1fb4535d-47d8-45c5-b6d6-d05e57237b98', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'serial': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 807.143875] env[61898]: DEBUG oslo_concurrency.lockutils [req-5a3fad6d-5c7b-4148-a9b4-2297c55225f5 req-1baebab4-fb00-4d05-a037-4985c2e2e95a service nova] Releasing lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.170189] env[61898]: DEBUG nova.scheduler.client.report [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 807.179746] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-changed-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 807.179944] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing instance network info cache due to event network-changed-93f00603-54ee-451f-9579-32f82d4923b0. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 807.180176] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.180315] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.180470] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.206414] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.206559] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.206741] env[61898]: DEBUG nova.network.neutron [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.260549] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520bcd9d-f2ad-34ef-a71d-e414cddc236d, 'name': SearchDatastore_Task, 'duration_secs': 0.011916} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.260745] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 807.260984] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 807.261257] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.261444] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.261633] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 807.261893] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a73c556e-0f2c-4500-ab00-a78f0cdc157f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.275893] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 807.276090] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 807.276889] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cd38699-c817-4f9d-97f4-fa43e2aa7aa0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.283012] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 807.283012] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5245ae27-18e6-9232-cea0-05f22f884fc8" [ 807.283012] env[61898]: _type = "Task" [ 807.283012] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.290940] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5245ae27-18e6-9232-cea0-05f22f884fc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.301535] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.301790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.301996] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.302275] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.302452] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.305189] env[61898]: INFO nova.compute.manager [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Terminating instance [ 807.413565] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240650, 'name': CloneVM_Task, 'duration_secs': 1.608392} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.413725] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Created linked-clone VM from snapshot [ 807.414471] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16f3713-c036-4de0-a58e-809499754c7e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.423156] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Uploading image 98751983-6ee3-4b66-bcb8-0f2c032b7883 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 807.445721] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 807.445721] env[61898]: value = "vm-267640" [ 807.445721] env[61898]: _type = "VirtualMachine" [ 807.445721] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 807.446321] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7c727236-df64-4f4e-a916-d1cd30322be7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.453022] env[61898]: DEBUG oslo_vmware.api [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240657, 'name': PowerOnVM_Task, 'duration_secs': 0.482442} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.454191] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.454399] env[61898]: INFO nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Took 9.54 seconds to spawn the instance on the hypervisor. 
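Note on the pattern above: the CreateVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries all follow the same invoke-then-poll flow: the driver invokes a vSphere *_Task method through oslo.vmware and then blocks in wait_for_task while _poll_task reports "progress is N%" until the task completes. The sketch below is illustrative only and is not Nova source; the vCenter host, credentials, and the way the VM reference is obtained are placeholders.

# Minimal sketch (assumed usage, not Nova code) of the oslo.vmware
# invoke-then-poll pattern seen in the log entries above.
from oslo_vmware import api, vim_util

# Placeholder endpoint/credentials for a reachable vCenter.
session = api.VMwareAPISession(
    'vc1.example.test', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Fetch one VirtualMachine managed-object reference via the
# PropertyCollector (the RetrievePropertiesEx calls in the log);
# retrieval continuation and error handling are omitted here.
result = session.invoke_api(vim_util, 'get_objects',
                            session.vim, 'VirtualMachine', 1)
vm_ref = result.objects[0].obj

# Invoke a *_Task method, then let oslo.vmware poll it; the repeated
# "progress is N%" / "completed successfully" lines come from this
# polling loop, and wait_for_task() raises if the task fails.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)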
[ 807.454573] env[61898]: DEBUG nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 807.454872] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease: (returnval){ [ 807.454872] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526bc644-9614-677d-8b7a-e1f819a8b861" [ 807.454872] env[61898]: _type = "HttpNfcLease" [ 807.454872] env[61898]: } obtained for exporting VM: (result){ [ 807.454872] env[61898]: value = "vm-267640" [ 807.454872] env[61898]: _type = "VirtualMachine" [ 807.454872] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 807.455138] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the lease: (returnval){ [ 807.455138] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526bc644-9614-677d-8b7a-e1f819a8b861" [ 807.455138] env[61898]: _type = "HttpNfcLease" [ 807.455138] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 807.455718] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bab265-16bf-443c-b08f-9498b6650e9c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.467070] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.467070] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526bc644-9614-677d-8b7a-e1f819a8b861" [ 807.467070] env[61898]: _type = "HttpNfcLease" [ 807.467070] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 807.474729] env[61898]: INFO nova.compute.manager [-] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Took 1.51 seconds to deallocate network for instance. [ 807.505219] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082586} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.505510] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 807.506427] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a736b9-aa55-430e-9715-97d340171e76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.529458] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 807.529989] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc88168f-7715-4614-973a-25372da6499c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.550824] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 807.550824] env[61898]: value = "task-1240660" [ 807.550824] env[61898]: _type = "Task" [ 807.550824] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.559022] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240660, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.675191] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.092s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.677819] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.359s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.678212] env[61898]: DEBUG nova.objects.instance [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lazy-loading 'resources' on Instance uuid 5b51a1a5-7d54-4063-b680-e8b8b39fc46a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.704079] env[61898]: INFO nova.scheduler.client.report [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Deleted allocations for instance a0580308-d25b-47cb-9c1c-adb763be7925 [ 807.788355] env[61898]: DEBUG nova.network.neutron [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.796198] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5245ae27-18e6-9232-cea0-05f22f884fc8, 'name': SearchDatastore_Task, 'duration_secs': 0.038089} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.797077] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91f984b1-858e-4ec2-a9c2-6b374596dc45 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.802479] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 807.802479] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250de4c-44f9-43f6-1b92-4cf144ba527d" [ 807.802479] env[61898]: _type = "Task" [ 807.802479] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.812345] env[61898]: DEBUG nova.compute.manager [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 807.812345] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.814744] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1e3298-8427-4215-b1cb-0dd9de09e4de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.826750] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5250de4c-44f9-43f6-1b92-4cf144ba527d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.833286] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.834132] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36f2d625-287e-4e93-a1c0-cbe3cdccc8e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.846023] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 807.846023] env[61898]: value = "task-1240661" [ 807.846023] env[61898]: _type = "Task" [ 807.846023] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.855465] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240661, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.966680] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 807.966680] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526bc644-9614-677d-8b7a-e1f819a8b861" [ 807.966680] env[61898]: _type = "HttpNfcLease" [ 807.966680] env[61898]: } is ready. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 807.967074] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 807.967074] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526bc644-9614-677d-8b7a-e1f819a8b861" [ 807.967074] env[61898]: _type = "HttpNfcLease" [ 807.967074] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 807.967714] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ea99b3-29a7-4308-9d59-a194e7e52316 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.981145] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.981657] env[61898]: INFO nova.compute.manager [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Took 32.91 seconds to build instance. [ 807.986374] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 807.986832] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk for reading. 
{{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 808.068558] env[61898]: DEBUG nova.network.neutron [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Updating instance_info_cache with network_info: [{"id": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "address": "fa:16:3e:e4:cd:ba", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe9b11a7-de", "ovs_interfaceid": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.085303] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240660, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.117586] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updated VIF entry in instance network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.118736] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.128156] env[61898]: DEBUG nova.objects.instance [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.175877] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1ee48645-5f26-494b-922c-e5506778ab59 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.214810] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cedb9137-0059-4367-9682-36f96f348198 tempest-ListServersNegativeTestJSON-165458236 tempest-ListServersNegativeTestJSON-165458236-project-member] Lock "a0580308-d25b-47cb-9c1c-adb763be7925" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.217s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.312680] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5250de4c-44f9-43f6-1b92-4cf144ba527d, 'name': SearchDatastore_Task, 'duration_secs': 0.032085} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.312940] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.313206] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 626caecc-6389-4064-aafd-9968cee262ee/626caecc-6389-4064-aafd-9968cee262ee.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 808.313458] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74d3fc5e-3967-4730-8eb2-33589766bae8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.319742] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 808.319742] env[61898]: value = "task-1240662" [ 808.319742] env[61898]: _type = "Task" [ 808.319742] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.330661] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.330868] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240662, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.360395] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240661, 'name': PowerOffVM_Task, 'duration_secs': 0.228077} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.360395] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.360395] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.360395] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a9b98a0-c5cc-4be6-a271-ad10b73a27c7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.490406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42ec1931-a35d-4e5e-a917-1a73fa4b26a4 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.510s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.541456] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d45553-ba90-4b15-b330-01f13d4dc004 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.550435] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13320847-9200-4601-a1cb-9928aeec3c2c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.584704] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.584996] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.585308] env[61898]: INFO nova.compute.manager [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Shelving [ 808.587488] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.588708] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Instance network_info: |[{"id": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "address": "fa:16:3e:e4:cd:ba", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe9b11a7-de", "ovs_interfaceid": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 808.593034] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:cd:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe9b11a7-dec5-4707-bb53-ea517e5a1b55', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.601610] env[61898]: DEBUG oslo.service.loopingcall [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.602518] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeac80c4-96c4-4233-8971-a37d984a006e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.607704] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.608662] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e24b9c84-aeb8-4fd1-96f3-fccbb3ffbde3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.632243] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.632539] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Received event network-vif-plugged-fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.632834] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Acquiring lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.633122] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.633360] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.633727] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] No waiting events found dispatching network-vif-plugged-fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 808.633981] env[61898]: WARNING nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Received unexpected event network-vif-plugged-fe9b11a7-dec5-4707-bb53-ea517e5a1b55 for instance with vm_state building and task_state spawning. 
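Note on the lockutils entries interleaved above: concurrent build, terminate, and external-event paths are serialized on named locks (for example the refresh_cache-<instance uuid> lock acquired before an instance's network info cache is rebuilt and released afterwards, or the coarser "compute_resources" lock held by the resource tracker). A minimal sketch of that pattern with oslo.concurrency follows; the lock names and the guarded functions are invented for illustration and are not the Nova implementations.

# Minimal sketch (assumed usage) of the named-lock pattern visible in
# the Acquiring/Acquired/Releasing lock lines above.
from oslo_concurrency import lockutils

INSTANCE_UUID = '86367a82-239b-4f6e-b306-d9661eadf95e'  # uuid taken from the log

def refresh_cache(instance_uuid):
    # In-process critical section: only one thread/greenthread at a time
    # may rebuild the network info cache for this instance.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('rebuilding network info cache for %s' % instance_uuid)

# Decorator form: every call to the wrapped function is serialized on
# the same named lock, as with the "compute_resources" lock in the log.
@lockutils.synchronized('compute_resources')
def update_usage():
    print('updating resource tracker usage')

refresh_cache(INSTANCE_UUID)
update_usage()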
[ 808.634341] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Received event network-changed-fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 808.634532] env[61898]: DEBUG nova.compute.manager [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Refreshing instance network info cache due to event network-changed-fe9b11a7-dec5-4707-bb53-ea517e5a1b55. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 808.634744] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Acquiring lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.634976] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Acquired lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.635252] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Refreshing network info cache for port fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 808.640024] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240660, 'name': ReconfigVM_Task, 'duration_secs': 0.924775} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.640024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.640024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.640024] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Deleting the datastore file [datastore2] b709df92-bf56-40ed-ba48-a8fa19be8b68 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.643433] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 808.644629] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b6ca920-1144-4f3f-941c-76868c2223ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.650370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-85164c41-48d1-4bd6-92ff-b5e9d74e49d0 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.821s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.650875] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1a42f13-5b50-46f8-b65c-156b8be156d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.653995] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a38ab36-f9cb-4728-a8b4-d3157866da0e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.660096] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.330s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.660429] env[61898]: DEBUG nova.compute.manager [None req-a925a024-e61a-4371-a242-db38dddefd03 
tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 808.664409] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2ba25e-20ec-441d-95c0-f194df331ee7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.669324] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.669324] env[61898]: value = "task-1240664" [ 808.669324] env[61898]: _type = "Task" [ 808.669324] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.671386] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for the task: (returnval){ [ 808.671386] env[61898]: value = "task-1240665" [ 808.671386] env[61898]: _type = "Task" [ 808.671386] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.692748] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 808.692748] env[61898]: value = "task-1240666" [ 808.692748] env[61898]: _type = "Task" [ 808.692748] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.693326] env[61898]: DEBUG nova.compute.provider_tree [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.699994] env[61898]: DEBUG nova.compute.manager [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 808.699994] env[61898]: DEBUG nova.objects.instance [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 808.713489] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240664, 'name': CreateVM_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.716155] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240665, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.722924] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240666, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.830387] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240662, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473532} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.831679] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 626caecc-6389-4064-aafd-9968cee262ee/626caecc-6389-4064-aafd-9968cee262ee.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 808.831679] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 808.831679] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ae81b8e-ab49-4695-8305-1b87562f4975 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.840028] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 808.840028] env[61898]: value = "task-1240667" [ 808.840028] env[61898]: _type = "Task" [ 808.840028] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.853608] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240667, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.182972] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240664, 'name': CreateVM_Task, 'duration_secs': 0.377462} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.184281] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.184402] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.184628] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.185074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.188277] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97dc5565-c05d-4090-bdf5-80ec856e5449 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.191115] env[61898]: DEBUG oslo_vmware.api [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Task: {'id': task-1240665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207576} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.192687] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 809.192961] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 809.193242] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.193512] env[61898]: INFO nova.compute.manager [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Took 1.38 seconds to destroy the instance on the hypervisor. 
[ 809.193783] env[61898]: DEBUG oslo.service.loopingcall [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.194444] env[61898]: DEBUG nova.compute.manager [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 809.194543] env[61898]: DEBUG nova.network.neutron [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 809.197626] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 809.197626] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b9b0d5-013c-4a82-f02e-f6c711dbd0a2" [ 809.197626] env[61898]: _type = "Task" [ 809.197626] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.205197] env[61898]: DEBUG nova.scheduler.client.report [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 809.214393] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b9b0d5-013c-4a82-f02e-f6c711dbd0a2, 'name': SearchDatastore_Task, 'duration_secs': 0.01012} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.218731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.218835] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.220708] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.220708] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.220708] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.221798] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240666, 'name': Rename_Task, 'duration_secs': 0.175638} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.223331] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40d2580d-e7f2-44e1-9ec9-b2b1d2cde023 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.225673] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.226316] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddd13ef0-4e8c-47ee-b371-c9b6e869f0df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.233786] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 809.233786] env[61898]: value = "task-1240668" [ 809.233786] env[61898]: _type = "Task" [ 809.233786] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.236413] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.236661] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.240603] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b522199-cd7a-408d-be8d-fe80e8f00139 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.246999] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 809.246999] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527ea3f2-9634-8b3f-509f-21ca59633485" [ 809.246999] env[61898]: _type = "Task" [ 809.246999] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.250479] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240668, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.260490] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527ea3f2-9634-8b3f-509f-21ca59633485, 'name': SearchDatastore_Task, 'duration_secs': 0.010761} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.261236] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b36076-b172-4935-b456-44aa06fd234f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.266390] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 809.266390] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527e3d2d-8f0f-f48c-72fa-7ce3af808754" [ 809.266390] env[61898]: _type = "Task" [ 809.266390] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.275469] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e3d2d-8f0f-f48c-72fa-7ce3af808754, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.352436] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240667, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088158} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.352667] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 809.354054] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86183d71-9b37-497b-b96b-2d64f12c00c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.384181] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 626caecc-6389-4064-aafd-9968cee262ee/626caecc-6389-4064-aafd-9968cee262ee.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 809.385032] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7297e9d4-84e4-4300-b534-b1f32099e32b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.408119] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 809.408119] env[61898]: value = "task-1240669" [ 809.408119] env[61898]: _type = "Task" [ 809.408119] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.417550] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240669, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.598526] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.598526] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30e5c7e7-3670-44db-9a9b-e886775ad0e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.605834] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 809.605834] env[61898]: value = "task-1240670" [ 809.605834] env[61898]: _type = "Task" [ 809.605834] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.614632] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240670, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.681704] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Updated VIF entry in instance network info cache for port fe9b11a7-dec5-4707-bb53-ea517e5a1b55. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 809.682266] env[61898]: DEBUG nova.network.neutron [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Updating instance_info_cache with network_info: [{"id": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "address": "fa:16:3e:e4:cd:ba", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe9b11a7-de", "ovs_interfaceid": "fe9b11a7-dec5-4707-bb53-ea517e5a1b55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.711663] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.715554] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.774s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.716084] env[61898]: DEBUG nova.objects.instance [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) 
apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 809.728206] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.728206] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4db9229-76fe-4899-b05a-6746c3fb6c35 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.736215] env[61898]: DEBUG oslo_vmware.api [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 809.736215] env[61898]: value = "task-1240671" [ 809.736215] env[61898]: _type = "Task" [ 809.736215] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.750349] env[61898]: INFO nova.scheduler.client.report [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Deleted allocations for instance 5b51a1a5-7d54-4063-b680-e8b8b39fc46a [ 809.769178] env[61898]: DEBUG oslo_vmware.api [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.769872] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240668, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.785385] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e3d2d-8f0f-f48c-72fa-7ce3af808754, 'name': SearchDatastore_Task, 'duration_secs': 0.009945} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.785828] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.786221] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 86367a82-239b-4f6e-b306-d9661eadf95e/86367a82-239b-4f6e-b306-d9661eadf95e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.786604] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bb1ecf8-8468-4806-8190-c6e6dfc26224 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.795601] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 809.795601] env[61898]: value = "task-1240672" [ 809.795601] env[61898]: _type = "Task" [ 809.795601] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.807722] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.877889] env[61898]: DEBUG nova.compute.manager [req-ba4fc749-8a03-4e14-9cb8-1476ac142835 req-387ef11c-6052-458f-80e1-b23709b74619 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Received event network-vif-deleted-c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 809.877889] env[61898]: INFO nova.compute.manager [req-ba4fc749-8a03-4e14-9cb8-1476ac142835 req-387ef11c-6052-458f-80e1-b23709b74619 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Neutron deleted interface c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9; detaching it from the instance and deleting it from the info cache [ 809.878049] env[61898]: DEBUG nova.network.neutron [req-ba4fc749-8a03-4e14-9cb8-1476ac142835 req-387ef11c-6052-458f-80e1-b23709b74619 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.918673] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240669, 'name': ReconfigVM_Task, 'duration_secs': 0.320104} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.919067] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 626caecc-6389-4064-aafd-9968cee262ee/626caecc-6389-4064-aafd-9968cee262ee.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.919825] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65164bc9-d0c0-4804-a105-5cfa21b636fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.926508] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 809.926508] env[61898]: value = "task-1240673" [ 809.926508] env[61898]: _type = "Task" [ 809.926508] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.936795] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240673, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.120658] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240670, 'name': PowerOffVM_Task, 'duration_secs': 0.209586} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.121678] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.122724] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f7918b2-59fe-4407-9828-7e71a10c0d34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.164587] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61c6eb0-5b4a-4065-a8b0-939d7e41730c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.185522] env[61898]: DEBUG oslo_concurrency.lockutils [req-730dfdec-164c-40ab-9111-84572d2548e5 req-f09dbd49-3290-44b0-a78b-9320c533fec5 service nova] Releasing lock "refresh_cache-86367a82-239b-4f6e-b306-d9661eadf95e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.256579] env[61898]: DEBUG oslo_vmware.api [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240668, 'name': PowerOnVM_Task, 'duration_secs': 0.688294} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.263371] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.263774] env[61898]: INFO nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Took 9.81 seconds to spawn the instance on the hypervisor. [ 810.264081] env[61898]: DEBUG nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 810.264956] env[61898]: DEBUG oslo_vmware.api [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240671, 'name': PowerOffVM_Task, 'duration_secs': 0.219807} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.266101] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4031ce02-50fc-49dd-a8b1-948108f30760 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.270411] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 810.270833] env[61898]: DEBUG nova.compute.manager [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 810.275064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed13c570-6493-49c8-a362-97c42171b8f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.283549] env[61898]: DEBUG oslo_concurrency.lockutils [None req-82ec5151-3086-4d21-98d5-765e87a601b9 tempest-ServersTestMultiNic-1436633293 tempest-ServersTestMultiNic-1436633293-project-member] Lock "5b51a1a5-7d54-4063-b680-e8b8b39fc46a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.645s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.315802] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.350600] env[61898]: DEBUG nova.network.neutron [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.381157] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ae6e22c-977b-4ce1-b573-3e5c619bc28c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.391701] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0a5afb-9523-43e3-9dd6-c28a1579315a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.429071] env[61898]: DEBUG nova.compute.manager [req-ba4fc749-8a03-4e14-9cb8-1476ac142835 req-387ef11c-6052-458f-80e1-b23709b74619 service nova] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Detach interface failed, port_id=c3bb7681-34f1-4ad2-bc51-0c5048a8f0c9, reason: Instance b709df92-bf56-40ed-ba48-a8fa19be8b68 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 810.439209] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240673, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.678022] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 810.678022] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ef2f1cf3-3e92-4431-9418-c864fe3b73bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.685152] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 810.685152] env[61898]: value = "task-1240674" [ 810.685152] env[61898]: _type = "Task" [ 810.685152] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.696498] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240674, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.731422] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e7cbc41-3dcf-4ae3-a3a3-595bab30416d tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.735884] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.216s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.738741] env[61898]: INFO nova.compute.claims [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.813949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a925a024-e61a-4371-a242-db38dddefd03 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.154s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.818101] env[61898]: INFO nova.compute.manager [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Took 29.87 seconds to build instance. [ 810.823456] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240672, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.855358] env[61898]: INFO nova.compute.manager [-] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Took 1.66 seconds to deallocate network for instance. [ 810.940157] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240673, 'name': Rename_Task, 'duration_secs': 0.892748} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.941164] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 810.941996] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e50c511-4d48-4f2c-9e3e-181665b06289 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.950242] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 810.950242] env[61898]: value = "task-1240675" [ 810.950242] env[61898]: _type = "Task" [ 810.950242] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.959655] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.194672] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240674, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.314073] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240672, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.355571} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.314588] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 86367a82-239b-4f6e-b306-d9661eadf95e/86367a82-239b-4f6e-b306-d9661eadf95e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.314856] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.315296] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82caaaf1-d8cb-4d69-a2f4-5a4c1976400e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.323050] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 811.323050] env[61898]: value = "task-1240676" [ 811.323050] env[61898]: _type = "Task" [ 811.323050] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.328861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3127d389-0ace-400f-b13d-7bc8f98cd335 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.760s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.335468] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240676, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.363575] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.462611] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240675, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.702016] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240674, 'name': CreateSnapshot_Task, 'duration_secs': 0.795915} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.702016] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 811.703783] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e597c1d0-08f6-487e-aaee-27b7d0d57647 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.833278] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07912} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.836290] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.841101] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5a108c-016c-43ea-a354-674c23b80256 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.867592] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 86367a82-239b-4f6e-b306-d9661eadf95e/86367a82-239b-4f6e-b306-d9661eadf95e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.870682] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3f8e25e-5978-43ed-8d45-7123e78fbc79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.895028] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 811.895028] env[61898]: value = "task-1240677" [ 811.895028] env[61898]: _type = "Task" [ 811.895028] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.905371] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240677, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.963499] env[61898]: DEBUG oslo_vmware.api [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240675, 'name': PowerOnVM_Task, 'duration_secs': 0.637163} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.964464] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 811.964464] env[61898]: INFO nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Took 8.94 seconds to spawn the instance on the hypervisor. [ 811.964464] env[61898]: DEBUG nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 811.965031] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a205ae4-92d4-4cab-b493-8b739411b20a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.202359] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a98de6-a916-497c-ac61-d92b0457dcfd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.210591] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25369497-6a21-4bd5-897d-ffe18de976d0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.215143] env[61898]: DEBUG nova.objects.instance [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.253278] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 812.257084] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CloneVM_Task with opID=oslo.vmware-ff5cfba6-3d71-4345-9387-e38101a9e86d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.261120] env[61898]: DEBUG nova.compute.manager [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 812.261336] env[61898]: DEBUG nova.compute.manager [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing instance network info cache due to event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 812.261562] env[61898]: DEBUG oslo_concurrency.lockutils [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.261709] env[61898]: DEBUG oslo_concurrency.lockutils [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.261871] env[61898]: DEBUG nova.network.neutron [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.263470] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e173fd97-a106-4c79-8eba-18f514ee99c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.274252] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ca3898-db71-4584-8688-a611668e249d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.278910] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 812.278910] env[61898]: value = "task-1240678" [ 812.278910] env[61898]: _type = "Task" [ 812.278910] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.293038] env[61898]: DEBUG nova.compute.provider_tree [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.298920] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240678, 'name': CloneVM_Task} progress is 11%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.406763] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.493540] env[61898]: INFO nova.compute.manager [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Took 30.66 seconds to build instance. [ 812.726021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.726021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.726021] env[61898]: DEBUG nova.network.neutron [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.726021] env[61898]: DEBUG nova.objects.instance [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'info_cache' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.790675] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240678, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.796077] env[61898]: DEBUG nova.scheduler.client.report [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 812.909056] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240677, 'name': ReconfigVM_Task, 'duration_secs': 0.662641} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.909601] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 86367a82-239b-4f6e-b306-d9661eadf95e/86367a82-239b-4f6e-b306-d9661eadf95e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.910321] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-636b980d-9b8c-4788-82ea-d4f706acb5c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.917287] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 812.917287] env[61898]: value = "task-1240679" [ 812.917287] env[61898]: _type = "Task" [ 812.917287] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.929600] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240679, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.997120] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2974d60-bde9-44ea-9685-3999e68b8cc9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.323s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.067688] env[61898]: DEBUG nova.network.neutron [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updated VIF entry in instance network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.068121] env[61898]: DEBUG nova.network.neutron [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.229198] env[61898]: DEBUG nova.objects.base [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Object Instance<1fb4535d-47d8-45c5-b6d6-d05e57237b98> lazy-loaded attributes: flavor,info_cache {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 813.294745] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240678, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.300922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.302872] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 813.307166] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.135s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.307166] env[61898]: INFO nova.compute.claims [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.434371] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240679, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.575830] env[61898]: DEBUG oslo_concurrency.lockutils [req-45cdca83-ec9b-4c74-bd00-8ccc91e4ac59 req-c6a74f11-b5bf-4f8b-9953-736fbe33fa34 service nova] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.799077] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240678, 'name': CloneVM_Task, 'duration_secs': 1.381193} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.801659] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Created linked-clone VM from snapshot [ 813.802449] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1526d5-0bbb-40ca-a605-c6c214883f85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.810330] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Uploading image d0979276-b6d6-4292-baee-aeebb173bc3a {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 813.813474] env[61898]: DEBUG nova.compute.utils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 813.815050] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 813.815050] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 813.844908] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 813.844908] env[61898]: value = "vm-267644" [ 813.844908] env[61898]: _type = "VirtualMachine" [ 813.844908] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 813.845511] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d00d9bc6-b392-4ca6-9d85-e9560497496a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.852590] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lease: (returnval){ [ 813.852590] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521d5287-7280-bf45-c41f-0bf6d6a8999f" [ 813.852590] env[61898]: _type = "HttpNfcLease" [ 813.852590] env[61898]: } obtained for exporting VM: (result){ [ 813.852590] env[61898]: value = "vm-267644" [ 813.852590] env[61898]: _type = "VirtualMachine" [ 813.852590] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 813.853030] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the lease: (returnval){ [ 813.853030] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521d5287-7280-bf45-c41f-0bf6d6a8999f" [ 813.853030] env[61898]: _type = "HttpNfcLease" [ 813.853030] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 813.862021] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 813.862021] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521d5287-7280-bf45-c41f-0bf6d6a8999f" [ 813.862021] env[61898]: _type = "HttpNfcLease" [ 813.862021] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 813.919592] env[61898]: DEBUG nova.policy [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88a5606350204fcfbba29e2d90e90e2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11539a8a92af4208a15e69afe3dc60e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 813.931854] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240679, 'name': Rename_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.181371] env[61898]: DEBUG nova.network.neutron [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.318531] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 814.364097] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 814.364097] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521d5287-7280-bf45-c41f-0bf6d6a8999f" [ 814.364097] env[61898]: _type = "HttpNfcLease" [ 814.364097] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 814.366531] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 814.366531] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521d5287-7280-bf45-c41f-0bf6d6a8999f" [ 814.366531] env[61898]: _type = "HttpNfcLease" [ 814.366531] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 814.367986] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c516be-560e-4f9e-89a0-0a8e31a694c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.379717] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 814.379967] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 814.465281] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Successfully created port: 18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.473912] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240679, 'name': Rename_Task, 'duration_secs': 1.249042} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.474247] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.474541] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64515f5f-16f0-461a-a5c5-e19de2163961 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.481225] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 814.481225] env[61898]: value = "task-1240681" [ 814.481225] env[61898]: _type = "Task" [ 814.481225] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.495072] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240681, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.504169] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c5fe62dc-a5f0-4293-9eaa-b16e00b5b6ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.688779] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.836305] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9678da-cb01-4334-be34-381e9776a049 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.845647] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f821cd39-a594-4eef-a1d5-bb59cc2dfef6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.886799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5febfc-1a3a-4862-9c9d-fb3e672e0915 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.898715] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335c9bcd-f0e0-4dfb-b147-88aa5e24fceb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.917489] env[61898]: DEBUG nova.compute.provider_tree [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.994897] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240681, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.333866] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 815.387207] env[61898]: DEBUG nova.compute.manager [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 815.387398] env[61898]: DEBUG nova.compute.manager [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing instance network info cache due to event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 815.387751] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.389537] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.390173] env[61898]: DEBUG nova.network.neutron [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.421168] env[61898]: DEBUG nova.scheduler.client.report [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 815.494625] env[61898]: DEBUG oslo_vmware.api [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240681, 'name': PowerOnVM_Task, 'duration_secs': 0.873969} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.494861] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.495179] env[61898]: INFO nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Took 9.91 seconds to spawn the instance on the hypervisor. [ 815.495542] env[61898]: DEBUG nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 815.496540] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35356ac-685d-472a-9d24-0a9ad118e3a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.699343] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.703513] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54734b22-4265-4ed0-8526-a75d1f393b69 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.712556] env[61898]: DEBUG oslo_vmware.api [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 815.712556] env[61898]: value = "task-1240682" [ 815.712556] env[61898]: _type = "Task" [ 815.712556] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.723408] env[61898]: DEBUG oslo_vmware.api [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240682, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.929662] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.930425] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 815.933440] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.723s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.933689] env[61898]: DEBUG nova.objects.instance [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lazy-loading 'resources' on Instance uuid ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.016543] env[61898]: INFO nova.compute.manager [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Took 33.36 seconds to build instance. [ 816.226784] env[61898]: DEBUG oslo_vmware.api [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240682, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.436791] env[61898]: DEBUG nova.compute.utils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 816.441737] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 816.441737] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.520536] env[61898]: DEBUG oslo_concurrency.lockutils [None req-543a27f2-c71b-47af-a239-2360fd6b9fd8 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.517s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.525075] env[61898]: DEBUG nova.policy [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 816.584790] env[61898]: DEBUG nova.network.neutron [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updated VIF entry in instance network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.584790] env[61898]: DEBUG nova.network.neutron [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.732267] env[61898]: DEBUG oslo_vmware.api [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240682, 'name': PowerOnVM_Task, 'duration_secs': 0.53263} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.733079] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 816.733335] env[61898]: DEBUG nova.compute.manager [None req-1e252066-2475-4eb6-b51a-b2258dbfec93 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 816.734728] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab6e820-6e48-4846-996f-af588a608f0b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.791649] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Successfully updated port: 18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.796669] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a4202d-3c1b-4f01-b01b-ccb10ebf4f29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.808232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c85818-07e1-4595-a7fc-3b6373850893 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.846413] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c996baf-abe0-42a5-bac5-1f0d76936556 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.858547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0600fa59-4ff9-4d62-b977-d06a79aabab3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.875443] env[61898]: DEBUG nova.compute.provider_tree [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.944899] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 817.035868] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Successfully created port: 6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.087368] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b8857a5-a611-4ce5-9c92-f439931fdfb1 req-ece14301-554b-48b2-8a68-0de581f03f73 service nova] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.302769] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.302769] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.302769] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.378618] env[61898]: DEBUG nova.scheduler.client.report [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 817.843573] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.849807] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.850321] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.850687] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.851030] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.851322] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.851590] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.851966] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.852275] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.852606] env[61898]: DEBUG nova.virt.hardware 
[None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.853814] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.853814] env[61898]: DEBUG nova.virt.hardware [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.856075] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5ffd2c-85a8-4db0-bf22-df634bc3414a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.865137] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570e38b5-75db-4f91-ada1-114bca0f3b82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.871947] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 817.874771] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4d45e3-ebc5-4843-aeb9-db07fce57161 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.883544] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.950s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.885977] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 817.885977] env[61898]: ERROR oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk due to incomplete transfer. 
[ 817.895597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.809s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.897212] env[61898]: INFO nova.compute.claims [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.899922] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9257c4d4-b37a-40a0-8f23-ca333d15b616 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.913961] env[61898]: DEBUG oslo_vmware.rw_handles [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a0458f-5f97-a7a3-c84b-623d52295c33/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 817.914299] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Uploaded image 98751983-6ee3-4b66-bcb8-0f2c032b7883 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 817.916892] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 817.917346] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d9a42afe-9726-4af2-9221-08e52329a446 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.928331] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 817.928331] env[61898]: value = "task-1240683" [ 817.928331] env[61898]: _type = "Task" [ 817.928331] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.929440] env[61898]: INFO nova.scheduler.client.report [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Deleted allocations for instance ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e [ 817.949179] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240683, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.957036] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 817.982690] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.983024] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.983203] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.984223] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.984223] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.984223] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.984223] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.984223] env[61898]: DEBUG nova.virt.hardware [None 
req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.984721] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.984721] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.984721] env[61898]: DEBUG nova.virt.hardware [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.986711] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8885d263-4988-48a5-9def-a2a01f3ca3a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.999919] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025d92ec-8801-43e0-b9c6-d878b3c038f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.048563] env[61898]: DEBUG nova.network.neutron [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Updating instance_info_cache with network_info: [{"id": "18c9f135-7f21-49ea-828d-27bdc6813079", "address": "fa:16:3e:93:59:1e", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c9f135-7f", "ovs_interfaceid": "18c9f135-7f21-49ea-828d-27bdc6813079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.089940] env[61898]: DEBUG nova.compute.manager [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Received event 
network-vif-plugged-18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 818.090026] env[61898]: DEBUG oslo_concurrency.lockutils [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] Acquiring lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.090703] env[61898]: DEBUG oslo_concurrency.lockutils [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] Lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.090703] env[61898]: DEBUG oslo_concurrency.lockutils [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] Lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.090990] env[61898]: DEBUG nova.compute.manager [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] No waiting events found dispatching network-vif-plugged-18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.091316] env[61898]: WARNING nova.compute.manager [req-ac2c70d0-e434-4a38-88e4-c277a72e7c4a req-f7c23a31-b1a2-4fca-8efe-927f9a0daa3b service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Received unexpected event network-vif-plugged-18c9f135-7f21-49ea-828d-27bdc6813079 for instance with vm_state building and task_state spawning. [ 818.447291] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240683, 'name': Destroy_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.447291] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0ac993ff-f28d-4666-9f50-823f65372484 tempest-ServerShowV257Test-986419587 tempest-ServerShowV257Test-986419587-project-member] Lock "ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.542s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.553701] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.554199] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Instance network_info: |[{"id": "18c9f135-7f21-49ea-828d-27bdc6813079", "address": "fa:16:3e:93:59:1e", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c9f135-7f", "ovs_interfaceid": "18c9f135-7f21-49ea-828d-27bdc6813079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 818.555109] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:59:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18c9f135-7f21-49ea-828d-27bdc6813079', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.566072] env[61898]: DEBUG oslo.service.loopingcall [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 818.566072] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.566072] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee88e9f6-d007-49c9-af2d-a92ae8783ce0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.592213] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.592213] env[61898]: value = "task-1240684" [ 818.592213] env[61898]: _type = "Task" [ 818.592213] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.602441] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240684, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.822526] env[61898]: DEBUG nova.compute.manager [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 818.822526] env[61898]: DEBUG nova.compute.manager [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing instance network info cache due to event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 818.822526] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.822526] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.822526] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.950752] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240683, 'name': Destroy_Task, 'duration_secs': 0.543329} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.956294] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Destroyed the VM [ 818.956918] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 818.957657] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fb03dae3-aefa-4c35-995b-952fe65d3d39 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.968236] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 818.968236] env[61898]: value = "task-1240685" [ 818.968236] env[61898]: _type = "Task" [ 818.968236] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.982425] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240685, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.058423] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Successfully updated port: 6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 819.105186] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240684, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.239691] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.239691] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.351375] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8647a285-d0d8-4980-8d11-8cb5e4fdb68f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.369495] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de6cd46-30a1-4537-a785-6008a697a120 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.407767] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6f456e-febe-43f7-8e9a-27b1c094ca9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.419037] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704339bf-b1c6-47a9-bef7-e1470bbccd24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.436794] env[61898]: DEBUG nova.compute.provider_tree [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 819.481054] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240685, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.561645] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.561850] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.562016] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 819.608772] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240684, 'name': CreateVM_Task, 'duration_secs': 0.579739} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.608950] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.609738] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.609907] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.610338] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 819.610611] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e55f708e-3d1a-4f93-8543-e7a4e28b197d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.618194] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 819.618194] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a7e941-4c55-b433-ae08-043e7e1fcd22" [ 819.618194] env[61898]: _type = "Task" [ 
819.618194] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.628572] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a7e941-4c55-b433-ae08-043e7e1fcd22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.747388] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 819.801232] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updated VIF entry in instance network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.801232] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.941870] env[61898]: DEBUG nova.scheduler.client.report [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 819.970472] env[61898]: INFO nova.compute.manager [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Rebuilding instance [ 819.983432] env[61898]: DEBUG oslo_vmware.api [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240685, 'name': RemoveSnapshot_Task, 'duration_secs': 0.837129} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.983765] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 819.984370] env[61898]: INFO nova.compute.manager [None req-dbc76f3e-0148-43c5-995e-8c10b8db1540 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Took 16.67 seconds to snapshot the instance on the hypervisor. [ 820.015778] env[61898]: DEBUG nova.compute.manager [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 820.016704] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3045e2-4cfe-4498-8df7-d51ed294f7a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.130862] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a7e941-4c55-b433-ae08-043e7e1fcd22, 'name': SearchDatastore_Task, 'duration_secs': 0.019665} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.131328] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.131623] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.131882] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.132039] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.132227] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.132545] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-279e843b-067a-430e-89ef-0bc760d4d118 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.144223] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.144467] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.145283] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5a421c7-ac95-4e34-be2c-f234ec251711 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.151908] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 820.151908] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfd4bf-7fbb-a324-d626-ef4ac14f423e" [ 820.151908] env[61898]: _type = "Task" [ 820.151908] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.162195] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfd4bf-7fbb-a324-d626-ef4ac14f423e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.271835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.305765] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.306792] env[61898]: DEBUG nova.compute.manager [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 820.306792] env[61898]: DEBUG nova.compute.manager [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing instance network info cache due to event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 820.306792] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Acquiring lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.306792] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Acquired lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.307087] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 820.334026] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.449905] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.450794] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 820.456917] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.327s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.458374] env[61898]: INFO nova.compute.claims [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.512016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.512528] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.653640] env[61898]: DEBUG nova.network.neutron [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Updating instance_info_cache with network_info: [{"id": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "address": "fa:16:3e:52:66:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ebfe4e9-10", "ovs_interfaceid": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.665753] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': 
session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfd4bf-7fbb-a324-d626-ef4ac14f423e, 'name': SearchDatastore_Task, 'duration_secs': 0.016033} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.667114] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a0d63be-2cdd-418b-9fbb-779236eb9805 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.673830] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 820.673830] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52560580-caa1-1600-55d5-4fa43bc089ff" [ 820.673830] env[61898]: _type = "Task" [ 820.673830] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.684088] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52560580-caa1-1600-55d5-4fa43bc089ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.963532] env[61898]: DEBUG nova.compute.utils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.967488] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 820.967668] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 821.017785] env[61898]: DEBUG nova.policy [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64f22a09c344e468e74742efbd05cff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a476b83a7bda4078b4690a73adfea8c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 821.019724] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 821.030894] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.031198] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e9b0a95-5319-4d55-a5c4-1367c12dc3b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.041303] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 821.041303] env[61898]: value = "task-1240686" [ 821.041303] env[61898]: _type = "Task" [ 821.041303] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.053171] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240686, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.076210] env[61898]: DEBUG nova.compute.manager [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Received event network-changed-18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 821.076210] env[61898]: DEBUG nova.compute.manager [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Refreshing instance network info cache due to event network-changed-18c9f135-7f21-49ea-828d-27bdc6813079. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 821.076210] env[61898]: DEBUG oslo_concurrency.lockutils [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] Acquiring lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.076210] env[61898]: DEBUG oslo_concurrency.lockutils [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] Acquired lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.076210] env[61898]: DEBUG nova.network.neutron [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Refreshing network info cache for port 18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.159654] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.159998] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Instance network_info: |[{"id": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "address": "fa:16:3e:52:66:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ebfe4e9-10", "ovs_interfaceid": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 821.160656] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:66:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ebfe4e9-10ed-455d-bb95-26e7d9dbc197', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.169822] env[61898]: DEBUG oslo.service.loopingcall [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.170347] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.170645] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a6283ac-9bfd-4cc5-90af-c5b12ae88071 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.198882] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52560580-caa1-1600-55d5-4fa43bc089ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010313} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.200448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.200737] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] e19e820c-154d-4e91-8631-dab9439d11a2/e19e820c-154d-4e91-8631-dab9439d11a2.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.200977] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.200977] env[61898]: value = "task-1240687" [ 821.200977] env[61898]: _type = "Task" [ 821.200977] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.201676] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63775d7c-d79b-4739-922c-07f2274d7129 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.214678] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240687, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.221013] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 821.221013] env[61898]: value = "task-1240688" [ 821.221013] env[61898]: _type = "Task" [ 821.221013] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.231526] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.261447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "52a584e1-61ae-447d-90e0-e15d32a96314" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.261844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.262170] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.262591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.262672] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.266082] env[61898]: INFO nova.compute.manager [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Terminating instance [ 821.328347] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updated VIF entry in instance network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 821.332107] env[61898]: DEBUG nova.network.neutron [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updating instance_info_cache with network_info: [{"id": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "address": "fa:16:3e:ab:76:29", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ae0702-26", "ovs_interfaceid": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.368689] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Successfully created port: b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.390258] env[61898]: DEBUG nova.compute.manager [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Received event network-vif-plugged-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 821.390853] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] Acquiring lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.390853] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 
req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.391118] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.391314] env[61898]: DEBUG nova.compute.manager [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] No waiting events found dispatching network-vif-plugged-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.391534] env[61898]: WARNING nova.compute.manager [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Received unexpected event network-vif-plugged-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 for instance with vm_state building and task_state spawning. [ 821.391805] env[61898]: DEBUG nova.compute.manager [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Received event network-changed-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 821.391913] env[61898]: DEBUG nova.compute.manager [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Refreshing instance network info cache due to event network-changed-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 821.392205] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] Acquiring lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.392369] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] Acquired lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.392565] env[61898]: DEBUG nova.network.neutron [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Refreshing network info cache for port 6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.468463] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 821.548555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.564989] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240686, 'name': PowerOffVM_Task, 'duration_secs': 0.260511} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.566897] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 821.566897] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 821.566897] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33985dab-66f3-4b00-a0bf-71282d5b2eba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.578446] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 821.578446] env[61898]: value = "task-1240689" [ 821.578446] env[61898]: _type = "Task" [ 821.578446] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.597645] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 821.597880] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 821.598139] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267575', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'name': 'volume-5e4f66be-193a-428e-ae80-03e6b55967d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '466cbf07-e945-48d4-a103-5a3ea2b7adf6', 'attached_at': '', 'detached_at': '', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'serial': '5e4f66be-193a-428e-ae80-03e6b55967d5'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 821.599295] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6691a56e-f97d-4132-aeab-fde7414bbc2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.633477] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da554c9-28a1-454f-a69b-4ced60093bcd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.644370] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7228c8-686e-49cb-979a-16cd5b1bdbd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.672854] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89fda5b-839f-488a-8e79-4095fbbb66c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.695364] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] The volume has not been displaced from its original location: [datastore1] volume-5e4f66be-193a-428e-ae80-03e6b55967d5/volume-5e4f66be-193a-428e-ae80-03e6b55967d5.vmdk. No consolidation needed. {{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 821.701839] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 821.702892] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4e47005-36bb-4532-9187-3c9e81f84efe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.735380] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240687, 'name': CreateVM_Task, 'duration_secs': 0.463138} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.736083] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 821.736083] env[61898]: value = "task-1240690" [ 821.736083] env[61898]: _type = "Task" [ 821.736083] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.737213] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.737254] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.737481] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.737940] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 821.745296] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d8682aa-5870-44b6-8ddd-3d7386f2b184 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.747382] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240688, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.757848] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240690, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.759611] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 821.759611] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52697f8b-c71f-80db-a48b-1be2cdc1a2b0" [ 821.759611] env[61898]: _type = "Task" [ 821.759611] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.774831] env[61898]: DEBUG nova.compute.manager [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 821.774831] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.774831] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52697f8b-c71f-80db-a48b-1be2cdc1a2b0, 'name': SearchDatastore_Task, 'duration_secs': 0.014147} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.774831] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d3e129-8529-4b43-b1ea-513e43cede8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.778366] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.778630] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 821.778956] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.779143] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.779485] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 821.780206] env[61898]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c37fa4c-bbf9-466a-b16d-0b19843e4c32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.790573] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 821.791752] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9edc483d-7f54-499f-a056-7201aad3f196 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.793491] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 821.793713] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 821.794522] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-590b6ff3-30b1-46c3-bb5f-f6c69188e816 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.801620] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 821.801620] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528c33c6-c8a7-4aeb-f274-2dac1c7c9ef5" [ 821.801620] env[61898]: _type = "Task" [ 821.801620] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.817545] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528c33c6-c8a7-4aeb-f274-2dac1c7c9ef5, 'name': SearchDatastore_Task, 'duration_secs': 0.012534} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.818488] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0522c9c2-a92a-4d9f-aa8d-3de569157e61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.829357] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 821.829357] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a65bc7-f12f-b01c-702d-220870e7df93" [ 821.829357] env[61898]: _type = "Task" [ 821.829357] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.835402] env[61898]: DEBUG oslo_concurrency.lockutils [req-015d75e3-c124-4028-a8ac-786baead0479 req-07d07e0e-2325-48e0-a173-8f4b2650d8c3 service nova] Releasing lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.839293] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a65bc7-f12f-b01c-702d-220870e7df93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.872475] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.872786] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.873009] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore1] 52a584e1-61ae-447d-90e0-e15d32a96314 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.873374] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-206f31ab-994b-462c-bf94-0aacaae2cef6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.881328] env[61898]: DEBUG oslo_vmware.api [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 821.881328] env[61898]: value = "task-1240692" [ 821.881328] env[61898]: _type = "Task" [ 821.881328] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.898159] env[61898]: DEBUG oslo_vmware.api [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240692, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.995875] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caba5259-34a6-4a50-9dd9-9a255dc6b1a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.005029] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660755b8-86d2-4d1c-b3da-2d891355f8c8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.049512] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc1514c-dc58-4b19-b8c2-ac3034049e77 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.056458] env[61898]: DEBUG nova.network.neutron [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Updated VIF entry in instance network info cache for port 18c9f135-7f21-49ea-828d-27bdc6813079. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.056458] env[61898]: DEBUG nova.network.neutron [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Updating instance_info_cache with network_info: [{"id": "18c9f135-7f21-49ea-828d-27bdc6813079", "address": "fa:16:3e:93:59:1e", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18c9f135-7f", "ovs_interfaceid": "18c9f135-7f21-49ea-828d-27bdc6813079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.066629] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb17434-2952-425e-8b66-621509a93390 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.090126] env[61898]: DEBUG nova.compute.provider_tree [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.242977] env[61898]: DEBUG oslo_vmware.api [None 
req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545982} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.244699] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] e19e820c-154d-4e91-8631-dab9439d11a2/e19e820c-154d-4e91-8631-dab9439d11a2.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.244936] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.245225] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c11fd68a-0fef-4a63-a82f-fef42d6ff3bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.256009] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240690, 'name': ReconfigVM_Task, 'duration_secs': 0.204426} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.260412] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 822.268594] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 822.268594] env[61898]: value = "task-1240693" [ 822.268594] env[61898]: _type = "Task" [ 822.268594] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.268594] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c5dc34e-ce44-464e-a6ac-83483f31d597 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.293075] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240693, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.294777] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 822.294777] env[61898]: value = "task-1240694" [ 822.294777] env[61898]: _type = "Task" [ 822.294777] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.305577] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240694, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.342028] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a65bc7-f12f-b01c-702d-220870e7df93, 'name': SearchDatastore_Task, 'duration_secs': 0.019548} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.344609] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.344609] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 51f33e74-0bb3-488c-9a6d-d1ccc53f469b/51f33e74-0bb3-488c-9a6d-d1ccc53f469b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.344609] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-243e907e-3171-4ae8-85f2-f04784f8d288 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.352092] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 822.352092] env[61898]: value = "task-1240695" [ 822.352092] env[61898]: _type = "Task" [ 822.352092] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.359096] env[61898]: DEBUG nova.network.neutron [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Updated VIF entry in instance network info cache for port 6ebfe4e9-10ed-455d-bb95-26e7d9dbc197. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.359556] env[61898]: DEBUG nova.network.neutron [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Updating instance_info_cache with network_info: [{"id": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "address": "fa:16:3e:52:66:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ebfe4e9-10", "ovs_interfaceid": "6ebfe4e9-10ed-455d-bb95-26e7d9dbc197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.366185] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.395859] env[61898]: DEBUG oslo_vmware.api [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240692, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151653} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.396243] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 822.396478] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 822.396690] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.396949] env[61898]: INFO nova.compute.manager [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Took 0.62 seconds to destroy the instance on the hypervisor. [ 822.397395] env[61898]: DEBUG oslo.service.loopingcall [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.397694] env[61898]: DEBUG nova.compute.manager [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 822.397876] env[61898]: DEBUG nova.network.neutron [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.483328] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 822.515083] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 822.515352] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 822.515515] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.515701] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 822.515903] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.516052] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 822.516405] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 822.516597] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 822.516777] env[61898]: DEBUG nova.virt.hardware [None 
req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 822.516945] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 822.517172] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 822.518103] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977f7047-1fd4-4206-a648-c58940dff3fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.526958] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939173f0-0064-4336-9f4f-8eae87ff68b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.557740] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "626caecc-6389-4064-aafd-9968cee262ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.558047] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.558310] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "626caecc-6389-4064-aafd-9968cee262ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.558523] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.558698] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 
tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.560797] env[61898]: DEBUG oslo_concurrency.lockutils [req-fed086c6-27c2-4995-b910-b1e3ded483f9 req-e3d5051d-356b-4665-83af-6a0dafbcacc7 service nova] Releasing lock "refresh_cache-e19e820c-154d-4e91-8631-dab9439d11a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.561463] env[61898]: INFO nova.compute.manager [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Terminating instance [ 822.594883] env[61898]: DEBUG nova.scheduler.client.report [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 822.752431] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.752431] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 822.801743] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240693, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.151849} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.807781] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.808858] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c272e9-acae-4046-a84a-b6f6e1ec7580 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.831700] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240694, 'name': ReconfigVM_Task, 'duration_secs': 0.224477} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.842602] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] e19e820c-154d-4e91-8631-dab9439d11a2/e19e820c-154d-4e91-8631-dab9439d11a2.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.845310] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267575', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'name': 'volume-5e4f66be-193a-428e-ae80-03e6b55967d5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '466cbf07-e945-48d4-a103-5a3ea2b7adf6', 'attached_at': '', 'detached_at': '', 'volume_id': '5e4f66be-193a-428e-ae80-03e6b55967d5', 'serial': '5e4f66be-193a-428e-ae80-03e6b55967d5'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 822.845983] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.846438] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6348a29e-69cf-444a-bc41-b38c3000c756 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.863811] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e017d206-28f6-4b51-9c72-168cc83cba31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.868628] env[61898]: DEBUG oslo_concurrency.lockutils [req-a682c3da-8c8e-42f1-a275-49062150ab16 req-3b20b1d0-fac4-4d09-be41-9b91473cc1ac 
service nova] Releasing lock "refresh_cache-51f33e74-0bb3-488c-9a6d-d1ccc53f469b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.881107] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240695, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.885745] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 822.885745] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 822.885745] env[61898]: value = "task-1240696" [ 822.885745] env[61898]: _type = "Task" [ 822.885745] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.886319] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa1ae839-ea27-4be0-a698-c4fbb2ce8813 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.900890] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240696, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.984904] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 822.984904] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 822.984904] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Deleting the datastore file [datastore1] 466cbf07-e945-48d4-a103-5a3ea2b7adf6 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 822.984904] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c75ab095-9f86-4bf1-95b2-4dc8496ed622 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.994116] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for the task: (returnval){ [ 822.994116] env[61898]: value = "task-1240698" [ 822.994116] env[61898]: _type = "Task" [ 822.994116] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.011093] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240698, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.065137] env[61898]: DEBUG nova.compute.manager [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 823.065506] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.068026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f248e63e-2bea-450e-a614-a03e1b962599 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.079397] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.079810] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26044d58-cb0c-472c-acd8-ed46d14ce6f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.089078] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 823.089078] env[61898]: value = "task-1240699" [ 823.089078] env[61898]: _type = "Task" [ 823.089078] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.101127] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.101728] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 823.107420] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240699, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.107420] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.239s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.107420] env[61898]: DEBUG nova.objects.instance [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 823.213051] env[61898]: DEBUG nova.compute.manager [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Received event network-vif-plugged-b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 823.214047] env[61898]: DEBUG oslo_concurrency.lockutils [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] Acquiring lock "eda63357-6749-4652-914a-dc5b69163eb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.214047] env[61898]: DEBUG oslo_concurrency.lockutils [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] Lock "eda63357-6749-4652-914a-dc5b69163eb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.214047] env[61898]: DEBUG oslo_concurrency.lockutils [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] Lock "eda63357-6749-4652-914a-dc5b69163eb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.214210] env[61898]: DEBUG nova.compute.manager [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] No waiting events found dispatching network-vif-plugged-b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 823.214244] env[61898]: WARNING nova.compute.manager [req-3291998a-052a-4ea0-ad5e-71e8447dc91b req-ce2cc8a6-dcfe-46f0-995a-49c2e4a88d11 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Received unexpected event network-vif-plugged-b5e10793-f18b-4c54-8373-45f9b9e9fd46 for instance with vm_state building and task_state spawning. 
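The entries throughout this section repeatedly exercise the oslo.vmware task-wait pattern: a SOAP task method is invoked (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...), wait_for_task is entered at oslo_vmware/api.py:397, and _poll_task then logs "progress is N%" (api.py:434) until "completed successfully" (api.py:444). A minimal sketch of that pattern is shown below; the connection values are hypothetical placeholders (real deployments take them from nova.conf), and vm_ref / reconfig_spec are assumed to be obtained elsewhere, so this is an illustration of the polling loop seen in the log rather than the driver's actual code path.

    from oslo_vmware import api

    def reconfig_and_wait(session, vm_ref, reconfig_spec):
        # invoke_api() issues the SOAP call (here ReconfigVM_Task) and returns a task
        # reference; wait_for_task() then polls it, which is what produces the
        # "Task: {...} progress is N%" and "completed successfully" lines in this log.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=reconfig_spec)
        return session.wait_for_task(task)

    # Hypothetical connection settings, for illustration only.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)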
[ 823.239984] env[61898]: DEBUG nova.compute.manager [req-24d56c83-456f-4a64-b1e0-ff8b8afe7543 req-f96c86c8-b976-4fd5-87a8-0f66049d2665 service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Received event network-vif-deleted-9eb9e879-2a9d-4f9d-8a74-ae7d21738e53 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 823.240235] env[61898]: INFO nova.compute.manager [req-24d56c83-456f-4a64-b1e0-ff8b8afe7543 req-f96c86c8-b976-4fd5-87a8-0f66049d2665 service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Neutron deleted interface 9eb9e879-2a9d-4f9d-8a74-ae7d21738e53; detaching it from the instance and deleting it from the info cache [ 823.240468] env[61898]: DEBUG nova.network.neutron [req-24d56c83-456f-4a64-b1e0-ff8b8afe7543 req-f96c86c8-b976-4fd5-87a8-0f66049d2665 service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.253372] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 823.363615] env[61898]: DEBUG nova.network.neutron [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.380237] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.747786} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.380491] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 51f33e74-0bb3-488c-9a6d-d1ccc53f469b/51f33e74-0bb3-488c-9a6d-d1ccc53f469b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.380724] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.380999] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c7c05cf-30f3-43fb-b009-bcc9b91c163a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.395015] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 823.395015] env[61898]: value = "task-1240700" [ 823.395015] env[61898]: _type = "Task" [ 823.395015] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.405471] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.411726] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240700, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.507269] env[61898]: DEBUG oslo_vmware.api [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Task: {'id': task-1240698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177479} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.507667] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 823.507852] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 823.508087] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 823.527179] env[61898]: DEBUG nova.compute.manager [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 823.527179] env[61898]: DEBUG nova.compute.manager [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing instance network info cache due to event network-changed-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 823.527321] env[61898]: DEBUG oslo_concurrency.lockutils [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] Acquiring lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 823.527486] env[61898]: DEBUG oslo_concurrency.lockutils [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] Acquired lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.527627] env[61898]: DEBUG nova.network.neutron [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Refreshing network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.584637] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Volume detach. Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 823.585063] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-774ede05-e8b8-4200-9429-4e278d329b19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.598035] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7249e3f9-d3a1-4f92-93a4-df71c8d2c885 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.613794] env[61898]: DEBUG nova.compute.utils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 823.618446] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240699, 'name': PowerOffVM_Task, 'duration_secs': 0.247859} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.619047] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 823.619222] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 823.621594] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.621662] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.621950] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-817f7383-066c-4b86-bdcc-5142745e9181 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.651969] env[61898]: ERROR nova.compute.manager [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Failed to detach volume 5e4f66be-193a-428e-ae80-03e6b55967d5 from /dev/sda: nova.exception.InstanceNotFound: Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 could not be found. 
[ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Traceback (most recent call last): [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 4184, in _do_rebuild_instance [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self.driver.rebuild(**kwargs) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise NotImplementedError() [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] NotImplementedError [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] During handling of the above exception, another exception occurred: [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Traceback (most recent call last): [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3607, in _detach_root_volume [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self.driver.detach_volume(context, old_connection_info, [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 555, in detach_volume [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] return self._volumeops.detach_volume(connection_info, instance) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._detach_volume_vmdk(connection_info, instance) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] stable_ref.fetch_moref(session) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] nova.exception.InstanceNotFound: 
Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 could not be found. [ 823.651969] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.705347] env[61898]: DEBUG nova.policy [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64f22a09c344e468e74742efbd05cff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a476b83a7bda4078b4690a73adfea8c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 823.713658] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.713658] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.713889] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleting the datastore file [datastore1] 626caecc-6389-4064-aafd-9968cee262ee {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.714250] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cabd2c92-ff16-4a94-8250-facf1a7621e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.728027] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Successfully updated port: b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.728721] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 823.728721] env[61898]: value = "task-1240702" [ 823.728721] env[61898]: _type = "Task" [ 823.728721] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.738271] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240702, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.747803] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45fda06f-8d7b-47a8-817a-33c406d1af3f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.759404] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9288b518-e6d8-49cc-af97-961ad38bb9aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.807495] env[61898]: DEBUG nova.compute.manager [req-24d56c83-456f-4a64-b1e0-ff8b8afe7543 req-f96c86c8-b976-4fd5-87a8-0f66049d2665 service nova] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Detach interface failed, port_id=9eb9e879-2a9d-4f9d-8a74-ae7d21738e53, reason: Instance 52a584e1-61ae-447d-90e0-e15d32a96314 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 823.811834] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.868937] env[61898]: INFO nova.compute.manager [-] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Took 1.47 seconds to deallocate network for instance. [ 823.887343] env[61898]: DEBUG nova.compute.utils [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Build of instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 aborted: Failed to rebuild volume backed instance. {{(pid=61898) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 823.893275] env[61898]: ERROR nova.compute.manager [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 aborted: Failed to rebuild volume backed instance. 
[ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Traceback (most recent call last): [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 4184, in _do_rebuild_instance [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self.driver.rebuild(**kwargs) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise NotImplementedError() [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] NotImplementedError [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] During handling of the above exception, another exception occurred: [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Traceback (most recent call last): [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3642, in _rebuild_volume_backed_instance [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._detach_root_volume(context, instance, root_bdm) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3621, in _detach_root_volume [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] with excutils.save_and_reraise_exception(): [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self.force_reraise() [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise self.value [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3607, in _detach_root_volume [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self.driver.detach_volume(context, old_connection_info, [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 555, in detach_volume [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] return self._volumeops.detach_volume(connection_info, instance) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._detach_volume_vmdk(connection_info, instance) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] stable_ref.fetch_moref(session) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise exception.InstanceNotFound(instance_id=self._uuid) [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] nova.exception.InstanceNotFound: Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 could not be found. [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] During handling of the above exception, another exception occurred: [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Traceback (most recent call last): [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 11194, in _error_out_instance_on_exception [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] yield [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3910, in rebuild_instance [ 823.893275] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._do_rebuild_instance_with_claim( [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3996, in _do_rebuild_instance_with_claim [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._do_rebuild_instance( [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 4188, in _do_rebuild_instance [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._rebuild_default_impl(**kwargs) [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3765, in _rebuild_default_impl [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] self._rebuild_volume_backed_instance( [ 
823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] File "/opt/stack/nova/nova/compute/manager.py", line 3657, in _rebuild_volume_backed_instance [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] raise exception.BuildAbortException( [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] nova.exception.BuildAbortException: Build of instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 aborted: Failed to rebuild volume backed instance. [ 823.894718] env[61898]: ERROR nova.compute.manager [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] [ 823.906920] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240696, 'name': ReconfigVM_Task, 'duration_secs': 0.646698} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.906920] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Reconfigured VM instance instance-00000048 to attach disk [datastore2] e19e820c-154d-4e91-8631-dab9439d11a2/e19e820c-154d-4e91-8631-dab9439d11a2.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.906920] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5d22c59-eabd-444f-8c02-5731c7495330 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.911465] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130131} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.912133] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.912929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9ed524-b18e-45a7-9a85-324382339eba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.918413] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 823.918413] env[61898]: value = "task-1240703" [ 823.918413] env[61898]: _type = "Task" [ 823.918413] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.943175] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] 51f33e74-0bb3-488c-9a6d-d1ccc53f469b/51f33e74-0bb3-488c-9a6d-d1ccc53f469b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.943327] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc1b1baa-b645-4608-82ff-bd71658f43cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.962553] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240703, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.969972] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 823.969972] env[61898]: value = "task-1240704" [ 823.969972] env[61898]: _type = "Task" [ 823.969972] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.979908] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240704, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.120342] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 824.123928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dcaa3589-47fb-4cc0-a4dc-6631b0964199 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.125385] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.197s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.129600] env[61898]: INFO nova.compute.claims [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.231021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.231021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.231021] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.248043] env[61898]: DEBUG oslo_vmware.api [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240702, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157823} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.248350] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.248603] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.248873] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.249063] env[61898]: INFO nova.compute.manager [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Took 1.18 seconds to destroy the instance on the hypervisor. [ 824.249367] env[61898]: DEBUG oslo.service.loopingcall [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.249570] env[61898]: DEBUG nova.compute.manager [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 824.249688] env[61898]: DEBUG nova.network.neutron [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.265997] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Successfully created port: 6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.375220] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.435901] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240703, 'name': Rename_Task, 'duration_secs': 0.231012} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.436318] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.436658] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad6efcd0-325a-4312-9b2f-41c570ffe2d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.445253] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 824.445253] env[61898]: value = "task-1240705" [ 824.445253] env[61898]: _type = "Task" [ 824.445253] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.453389] env[61898]: DEBUG nova.network.neutron [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updated VIF entry in instance network info cache for port 16ae0702-2627-4e8c-a2fc-a0e9d977bd4a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.453746] env[61898]: DEBUG nova.network.neutron [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updating instance_info_cache with network_info: [{"id": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "address": "fa:16:3e:ab:76:29", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ae0702-26", "ovs_interfaceid": "16ae0702-2627-4e8c-a2fc-a0e9d977bd4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.462724] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240705, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.466036] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 824.466925] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6312645-2ce8-4e62-bc76-019d29188b72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.477407] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 824.477584] env[61898]: ERROR oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk due to incomplete transfer. [ 824.478518] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-09d54a8e-ae2f-46f1-93af-f4ff8fa7fd6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.484159] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240704, 'name': ReconfigVM_Task, 'duration_secs': 0.347694} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.484159] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Reconfigured VM instance instance-00000049 to attach disk [datastore2] 51f33e74-0bb3-488c-9a6d-d1ccc53f469b/51f33e74-0bb3-488c-9a6d-d1ccc53f469b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.485016] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-594af5ed-c08f-4109-9382-a350d6977231 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.492401] env[61898]: DEBUG oslo_vmware.rw_handles [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cc28ee-f099-4a1f-ab75-3ba91fee6db6/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 824.492401] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Uploaded image d0979276-b6d6-4292-baee-aeebb173bc3a to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 824.494181] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 824.495793] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2693faa6-9571-496e-89b1-00d34e80bb4b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.497794] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 824.497794] env[61898]: value = "task-1240706" [ 824.497794] env[61898]: _type = "Task" [ 824.497794] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.505373] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 824.505373] env[61898]: value = "task-1240707" [ 824.505373] env[61898]: _type = "Task" [ 824.505373] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.509684] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240706, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.515752] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240707, 'name': Destroy_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.774102] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.956123] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Updating instance_info_cache with network_info: [{"id": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "address": "fa:16:3e:98:9e:9f", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e10793-f1", "ovs_interfaceid": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.961206] env[61898]: DEBUG oslo_concurrency.lockutils [req-1cea03a4-ca7e-4f4b-ba7a-9e28f83e0ec0 req-10921c13-71bd-4850-8fd5-7a718a97be33 service nova] Releasing lock "refresh_cache-626caecc-6389-4064-aafd-9968cee262ee" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.968382] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240705, 'name': PowerOnVM_Task} progress is 90%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.010391] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240706, 'name': Rename_Task, 'duration_secs': 0.175866} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.011162] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.011492] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b1b970c-6b74-4174-9876-df3e78af0481 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.016180] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240707, 'name': Destroy_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.021830] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 825.021830] env[61898]: value = "task-1240708" [ 825.021830] env[61898]: _type = "Task" [ 825.021830] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.031742] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.135479] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 825.168465] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 825.169862] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 825.170268] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.170588] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 825.170866] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.171320] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 825.171500] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 825.171817] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 825.172268] env[61898]: DEBUG nova.virt.hardware [None 
req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 825.172502] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 825.172829] env[61898]: DEBUG nova.virt.hardware [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 825.174241] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8e6ea7-7605-42cf-b25f-b6767f69ba16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.190910] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903e3fb4-91b7-4497-b9c7-1aa8ad403b7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.427035] env[61898]: DEBUG nova.network.neutron [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.461236] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.461236] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Instance network_info: |[{"id": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "address": "fa:16:3e:98:9e:9f", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e10793-f1", "ovs_interfaceid": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 825.464409] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:9e:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5e10793-f18b-4c54-8373-45f9b9e9fd46', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.476549] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Creating folder: Project (a476b83a7bda4078b4690a73adfea8c9). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.478052] env[61898]: DEBUG oslo_vmware.api [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240705, 'name': PowerOnVM_Task, 'duration_secs': 0.753587} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.481077] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b3917fc-b53b-45de-a68e-509c44535d76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.482888] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.483235] env[61898]: INFO nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Took 10.15 seconds to spawn the instance on the hypervisor. [ 825.483556] env[61898]: DEBUG nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 825.484859] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e2db3e-f322-440e-a152-a12757ff7977 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.499823] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created folder: Project (a476b83a7bda4078b4690a73adfea8c9) in parent group-v267550. 
[ 825.500307] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Creating folder: Instances. Parent ref: group-v267647. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.501348] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21a5b205-ba9a-4e7b-a819-44efca727cfe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.514220] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created folder: Instances in parent group-v267647. [ 825.514828] env[61898]: DEBUG oslo.service.loopingcall [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 825.521377] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.524021] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240707, 'name': Destroy_Task, 'duration_secs': 0.602179} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.524021] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e38d2a7-1ca4-4c94-b33d-76c9c2abb220 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.540385] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Destroyed the VM [ 825.540859] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 825.547187] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-df282e5b-ffbe-41bd-9ccd-480d995e318b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.553802] env[61898]: DEBUG nova.compute.manager [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Received event network-changed-b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 825.554552] env[61898]: DEBUG nova.compute.manager [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] [instance: 
eda63357-6749-4652-914a-dc5b69163eb6] Refreshing instance network info cache due to event network-changed-b5e10793-f18b-4c54-8373-45f9b9e9fd46. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 825.554552] env[61898]: DEBUG oslo_concurrency.lockutils [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] Acquiring lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.554700] env[61898]: DEBUG oslo_concurrency.lockutils [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] Acquired lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.554902] env[61898]: DEBUG nova.network.neutron [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Refreshing network info cache for port b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.564658] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 825.564658] env[61898]: value = "task-1240711" [ 825.564658] env[61898]: _type = "Task" [ 825.564658] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.565175] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240708, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.565436] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.565436] env[61898]: value = "task-1240712" [ 825.565436] env[61898]: _type = "Task" [ 825.565436] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.590939] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240712, 'name': CreateVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.591610] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240711, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.593372] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff6fe89-aafc-45ae-8dab-d1ae18460a4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.602395] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2670f1f6-e42a-40c3-8c5b-43113b122ab2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.645589] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155b0f6b-b411-4d09-8f40-9dd927f587b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.653185] env[61898]: DEBUG nova.compute.manager [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 825.653185] env[61898]: DEBUG nova.compute.manager [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing instance network info cache due to event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 825.653185] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.653185] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.653185] env[61898]: DEBUG nova.network.neutron [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.659818] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54fdeda-4280-437a-8dc5-184705844a97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.677519] env[61898]: DEBUG nova.compute.provider_tree [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.918871] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.928434] env[61898]: INFO nova.compute.manager [-] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Took 1.68 seconds to deallocate network for instance. [ 826.010403] env[61898]: DEBUG nova.compute.manager [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Received event network-vif-plugged-6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 826.010403] env[61898]: DEBUG oslo_concurrency.lockutils [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] Acquiring lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.010403] env[61898]: DEBUG oslo_concurrency.lockutils [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.010403] env[61898]: DEBUG oslo_concurrency.lockutils [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.010403] env[61898]: DEBUG nova.compute.manager [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] No waiting events found dispatching network-vif-plugged-6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.010403] env[61898]: WARNING nova.compute.manager [req-b915174a-c5b5-496e-ac5b-336cf5840fc3 req-893533f7-30bf-4a58-a178-0ad9922e9e78 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Received unexpected event network-vif-plugged-6da9af79-d8f3-454e-b392-246ae38dc236 for instance with vm_state building and task_state spawning. [ 826.014553] env[61898]: INFO nova.compute.manager [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Took 32.51 seconds to build instance. [ 826.052469] env[61898]: DEBUG oslo_vmware.api [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240708, 'name': PowerOnVM_Task, 'duration_secs': 0.610321} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.053346] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 826.053346] env[61898]: INFO nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Took 8.10 seconds to spawn the instance on the hypervisor. [ 826.053682] env[61898]: DEBUG nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 826.054379] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceeff431-5e8b-431e-a437-9ef132e6bb75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.092094] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240711, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.092094] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240712, 'name': CreateVM_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.102134] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Successfully updated port: 6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.145286] env[61898]: DEBUG nova.network.neutron [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Updated VIF entry in instance network info cache for port b5e10793-f18b-4c54-8373-45f9b9e9fd46. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.145286] env[61898]: DEBUG nova.network.neutron [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Updating instance_info_cache with network_info: [{"id": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "address": "fa:16:3e:98:9e:9f", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5e10793-f1", "ovs_interfaceid": "b5e10793-f18b-4c54-8373-45f9b9e9fd46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.181226] env[61898]: DEBUG nova.scheduler.client.report [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 826.390442] env[61898]: DEBUG nova.network.neutron [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updated VIF entry in instance network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.390859] env[61898]: DEBUG nova.network.neutron [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.437328] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.519118] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1bf06b69-73b7-466c-bea6-28b5758e4514 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.989s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.580383] env[61898]: INFO nova.compute.manager [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Took 30.43 seconds to build instance. [ 826.588307] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240711, 'name': RemoveSnapshot_Task, 'duration_secs': 0.747719} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.592325] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 826.592325] env[61898]: DEBUG nova.compute.manager [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 826.592325] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240712, 'name': CreateVM_Task, 'duration_secs': 0.643005} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.593110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6256a678-3ac3-40c9-83d9-d39f4cf94628 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.595402] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.596600] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.596800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.597221] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 826.598088] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-536b0caf-0410-49ab-ae17-750a875f7e78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.605947] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.606101] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 
tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.606246] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.608223] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 826.608223] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52226690-c371-8079-5834-07f1c254ff96" [ 826.608223] env[61898]: _type = "Task" [ 826.608223] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.621216] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52226690-c371-8079-5834-07f1c254ff96, 'name': SearchDatastore_Task, 'duration_secs': 0.011921} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.621511] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.621738] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.621960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.622122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.622307] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.622754] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-987be769-1655-4fa6-a814-b6fc84e17b69 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.632060] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.632060] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.632736] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40eb5681-f6a9-4968-a625-1b56772f29c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.638336] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 826.638336] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529080b6-6b8f-a7df-dfe5-03b7695710c7" [ 826.638336] env[61898]: _type = "Task" [ 826.638336] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.647636] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529080b6-6b8f-a7df-dfe5-03b7695710c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.648110] env[61898]: DEBUG oslo_concurrency.lockutils [req-2935ad58-3f04-44cb-9ca2-c03889746dbc req-8503c76c-1feb-4717-a94a-843e947a1303 service nova] Releasing lock "refresh_cache-eda63357-6749-4652-914a-dc5b69163eb6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.688024] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.688024] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 826.689874] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.709s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.690550] env[61898]: DEBUG nova.objects.instance [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'resources' on Instance uuid b106ab9e-08d4-4d18-90e0-13a071c9efb1 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.732346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "e19e820c-154d-4e91-8631-dab9439d11a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.732346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.732346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.732595] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.732630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.734693] env[61898]: INFO nova.compute.manager [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Terminating instance [ 826.894112] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d 
req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.894404] env[61898]: DEBUG nova.compute.manager [req-5ad6d583-3189-469f-b68c-f9420b4bdb0d req-76bb3a94-c269-417c-a7e8-3ba315b584db service nova] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Received event network-vif-deleted-16ae0702-2627-4e8c-a2fc-a0e9d977bd4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.083016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b1ee9283-e49d-4ff9-a32b-f1aac8b0cd46 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.161s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.116396] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.116703] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.116923] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.117343] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.117520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.119100] env[61898]: INFO nova.compute.manager [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Shelve offloading [ 827.120779] env[61898]: INFO nova.compute.manager [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Terminating instance [ 827.149705] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529080b6-6b8f-a7df-dfe5-03b7695710c7, 'name': SearchDatastore_Task, 'duration_secs': 0.00873} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.150187] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5bfe095-9057-4b73-969e-b7201e7ff2c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.152871] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.158116] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 827.158116] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52743352-0ec3-3247-c584-cf950104c983" [ 827.158116] env[61898]: _type = "Task" [ 827.158116] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.169020] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52743352-0ec3-3247-c584-cf950104c983, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.194427] env[61898]: DEBUG nova.compute.utils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.200633] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 827.200819] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.240426] env[61898]: DEBUG nova.compute.manager [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 827.240538] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.241778] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed4ffbb-adb9-4009-94d5-d15cc7311300 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.254020] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.254288] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1b7b419-9de4-4ce0-bafa-7c18eae18062 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.263128] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 827.263128] env[61898]: value = "task-1240713" [ 827.263128] env[61898]: _type = "Task" [ 827.263128] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.276210] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240713, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.278781] env[61898]: DEBUG nova.policy [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a910d0cdf3cd4b17af818abd25a38b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ce0562f486e44cc877c1cc31525a13a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 827.378106] env[61898]: DEBUG nova.network.neutron [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Updating instance_info_cache with network_info: [{"id": "6da9af79-d8f3-454e-b392-246ae38dc236", "address": "fa:16:3e:ea:42:49", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da9af79-d8", "ovs_interfaceid": "6da9af79-d8f3-454e-b392-246ae38dc236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.562102] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d321684-882f-4b6c-a992-1db2074da407 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.573242] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1650b024-f9a5-4de8-8cb2-db12781c8f19 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.583425] env[61898]: DEBUG nova.compute.manager [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Received event network-changed-6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.583632] env[61898]: DEBUG nova.compute.manager [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Refreshing instance network info cache 
due to event network-changed-6da9af79-d8f3-454e-b392-246ae38dc236. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 827.583761] env[61898]: DEBUG oslo_concurrency.lockutils [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] Acquiring lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.616188] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Successfully created port: 5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.618506] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d94fa1-d8ce-4284-addd-ba47218908a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.624396] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.627529] env[61898]: DEBUG nova.compute.manager [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 827.627803] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c22f4ff-9bd7-4df5-a344-45de987ab1d3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.631220] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-849d44f2-ec64-4612-a92c-ecd38469587a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.633719] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123ccbf5-2d72-4e0f-a9e0-2cdbf19fd315 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.649566] env[61898]: DEBUG nova.compute.provider_tree [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.652760] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 827.652760] env[61898]: value = "task-1240714" [ 827.652760] env[61898]: _type = "Task" [ 827.652760] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.655843] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161c6c81-fa41-49fd-9cda-268fd4fc2935 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.683274] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 827.683520] env[61898]: DEBUG nova.compute.manager [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 827.684726] env[61898]: DEBUG nova.compute.manager [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 827.684906] env[61898]: DEBUG nova.compute.manager [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing instance network info cache due to event network-changed-0d2007ae-42aa-44eb-9414-3216e1c433d4. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 827.685138] env[61898]: DEBUG oslo_concurrency.lockutils [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] Acquiring lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.685285] env[61898]: DEBUG oslo_concurrency.lockutils [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] Acquired lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.685447] env[61898]: DEBUG nova.network.neutron [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Refreshing network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.686844] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52743352-0ec3-3247-c584-cf950104c983, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.697995] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ab7e29-dc69-4187-81c9-c66502058d68 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.700551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.700815] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] eda63357-6749-4652-914a-dc5b69163eb6/eda63357-6749-4652-914a-dc5b69163eb6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.702044] env[61898]: WARNING nova.virt.vmwareapi.driver [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 could not be found. [ 827.702231] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.702753] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 827.705016] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c6c9ec5-daf7-415a-94b2-5b3626742153 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.706775] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36681052-2fa1-4a3b-90a7-d10ee66dca4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.713349] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.713543] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.713744] env[61898]: DEBUG nova.network.neutron [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 827.716765] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 827.716765] env[61898]: value = "task-1240715" [ 827.716765] env[61898]: _type = "Task" [ 827.716765] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.720058] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac83708f-e48a-4acc-9e7c-3fe5fd30ca0e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.743424] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240715, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.761731] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 466cbf07-e945-48d4-a103-5a3ea2b7adf6 could not be found. 
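
Editorial note: the records above show the task-polling pattern these logs repeat throughout — a vCenter task (e.g. CopyVirtualDisk_Task, PowerOffVM_Task) is created, "Waiting for the task" is logged, progress is polled ("progress is 0%"), and completion is reported with a duration_secs value. The sketch below is a minimal, self-contained illustration of that poll-until-complete loop, not oslo.vmware's actual wait_for_task/_poll_task code; TaskInfo, get_task_info and all other names here are hypothetical stand-ins introduced only for illustration.

    # Illustrative sketch only -- mirrors the poll-until-complete pattern
    # visible in the surrounding log records, not oslo.vmware's implementation.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        # Hypothetical stand-in for a vSphere Task.info snapshot.
        state: str            # "running", "success" or "error"
        progress: int = 0
        error: str | None = None

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a task until it reaches a terminal state.

        get_task_info is a caller-supplied function that returns the current
        TaskInfo for task_id; in the real service, the session layer plays
        this role. Returns the elapsed time (analogous to duration_secs).
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                return time.monotonic() - start
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")
            # Emit a progress line on each poll, then sleep until the next one.
            print(f"Task {task_id!r} ({info.state}) progress is {info.progress}%.")
            time.sleep(poll_interval)

In the log itself this loop is driven by oslo.vmware on behalf of the compute manager: the per-poll "progress is N%" lines and the final "completed successfully" record with duration_secs correspond to the poll and return steps sketched above.
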
[ 827.761946] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.762673] env[61898]: INFO nova.compute.manager [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Took 0.13 seconds to destroy the instance on the hypervisor. [ 827.762673] env[61898]: DEBUG oslo.service.loopingcall [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.762673] env[61898]: DEBUG nova.compute.manager [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 827.762900] env[61898]: DEBUG nova.network.neutron [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.773394] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240713, 'name': PowerOffVM_Task, 'duration_secs': 0.177915} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.773678] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 827.773883] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 827.774148] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7091cd67-30a9-4425-b608-c50941b7da29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.810745] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.810945] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.811209] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.811316] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.811488] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.815806] env[61898]: INFO nova.compute.manager [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 
51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Terminating instance [ 827.843189] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 827.843430] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 827.843617] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleting the datastore file [datastore2] e19e820c-154d-4e91-8631-dab9439d11a2 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 827.843890] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ebe9535-afe9-4964-b184-05645d7ca8a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.850828] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 827.850828] env[61898]: value = "task-1240717" [ 827.850828] env[61898]: _type = "Task" [ 827.850828] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.859643] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240717, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.880630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.880979] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Instance network_info: |[{"id": "6da9af79-d8f3-454e-b392-246ae38dc236", "address": "fa:16:3e:ea:42:49", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da9af79-d8", "ovs_interfaceid": "6da9af79-d8f3-454e-b392-246ae38dc236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 827.881346] env[61898]: DEBUG oslo_concurrency.lockutils [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] Acquired lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.882322] env[61898]: DEBUG nova.network.neutron [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Refreshing network info cache for port 6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.882815] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:42:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6da9af79-d8f3-454e-b392-246ae38dc236', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.890871] env[61898]: DEBUG oslo.service.loopingcall [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 
tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.892011] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.892348] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f7b6c66-f3ef-4ac3-8f37-eea8a6639118 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.916053] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.916053] env[61898]: value = "task-1240718" [ 827.916053] env[61898]: _type = "Task" [ 827.916053] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.931153] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240718, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.155238] env[61898]: DEBUG nova.scheduler.client.report [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 828.236083] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240715, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503847} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.236366] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] eda63357-6749-4652-914a-dc5b69163eb6/eda63357-6749-4652-914a-dc5b69163eb6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 828.238060] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 828.238060] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f2bdf4df-a80f-4811-8f06-953020327ef9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.245553] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 828.245553] env[61898]: value = "task-1240719" [ 828.245553] env[61898]: _type = "Task" [ 828.245553] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.255566] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.318771] env[61898]: DEBUG nova.compute.manager [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 828.318966] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.320159] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27cf23a-89b7-45fd-b5bd-57bb05afcd15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.329266] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.331770] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb162cc7-c4cc-449c-abbf-ef8405ca5321 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.340122] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 828.340122] env[61898]: value = "task-1240720" [ 828.340122] env[61898]: _type = "Task" [ 828.340122] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.357237] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.367664] env[61898]: DEBUG oslo_vmware.api [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35507} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.367959] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.368250] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.368454] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.368657] env[61898]: INFO nova.compute.manager [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 828.368877] env[61898]: DEBUG oslo.service.loopingcall [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 828.369161] env[61898]: DEBUG nova.compute.manager [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 828.369303] env[61898]: DEBUG nova.network.neutron [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.430931] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240718, 'name': CreateVM_Task, 'duration_secs': 0.483938} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.431208] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 828.431939] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.432130] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.432448] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 828.432710] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a04355a-e63a-45af-8691-1b0b87c81b58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.438046] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 828.438046] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d7eda9-1a38-e788-c8bc-3f8418e2bd2a" [ 828.438046] env[61898]: _type = "Task" [ 828.438046] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.448509] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d7eda9-1a38-e788-c8bc-3f8418e2bd2a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.662229] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.665277] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.301s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.665543] env[61898]: DEBUG nova.objects.instance [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lazy-loading 'resources' on Instance uuid b709df92-bf56-40ed-ba48-a8fa19be8b68 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.687153] env[61898]: INFO nova.scheduler.client.report [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocations for instance b106ab9e-08d4-4d18-90e0-13a071c9efb1 [ 828.715924] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 828.745144] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 828.745438] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 828.745760] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.745986] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 828.746173] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.746327] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 828.746534] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 828.746735] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 828.747648] env[61898]: DEBUG 
nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 828.747648] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 828.747648] env[61898]: DEBUG nova.virt.hardware [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 828.748541] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aea744a-dbfb-4520-af33-50bb0cb90e89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.765394] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19112a5e-0085-4fbb-adc0-a0d499dd77cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.769583] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078327} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.769844] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 828.770956] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d1e2b4-c313-4514-9073-7bb63279f0e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.801368] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] eda63357-6749-4652-914a-dc5b69163eb6/eda63357-6749-4652-914a-dc5b69163eb6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.801368] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01ecf7f5-4ad4-44b6-902e-b4b175518d7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.827358] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 828.827358] env[61898]: value = "task-1240721" [ 828.827358] env[61898]: _type = "Task" [ 828.827358] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.837337] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240721, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.852269] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240720, 'name': PowerOffVM_Task, 'duration_secs': 0.196607} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.852598] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.852763] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.853030] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ce7d8cd-b230-48cc-a79b-3f6a63c191c8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.923013] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.923324] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.923793] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore2] 51f33e74-0bb3-488c-9a6d-d1ccc53f469b {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.923987] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7554c835-6c85-432c-a9e9-b9090894b4e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.932143] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 828.932143] env[61898]: value = "task-1240723" [ 828.932143] env[61898]: _type = "Task" [ 828.932143] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.943350] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240723, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.956255] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d7eda9-1a38-e788-c8bc-3f8418e2bd2a, 'name': SearchDatastore_Task, 'duration_secs': 0.010739} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.956614] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.956868] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 828.957123] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.958080] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.958180] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 828.958463] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7fa49dc-eedc-4d8c-a222-a5c85c7e17e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.968347] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 828.968558] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 828.970153] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34fc03d8-1626-4908-949c-1a93bf524faa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.976225] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 828.976225] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52076c2b-c04b-a9c0-3994-67ddaff8fc38" [ 828.976225] env[61898]: _type = "Task" [ 828.976225] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.985564] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52076c2b-c04b-a9c0-3994-67ddaff8fc38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.131845] env[61898]: DEBUG nova.network.neutron [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.195100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-848bf71c-5a39-4fa0-bd44-4dbe6134abe1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "b106ab9e-08d4-4d18-90e0-13a071c9efb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.916s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.252990] env[61898]: DEBUG nova.network.neutron [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updated VIF entry in instance network info cache for port 0d2007ae-42aa-44eb-9414-3216e1c433d4. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.253223] env[61898]: DEBUG nova.network.neutron [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [{"id": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "address": "fa:16:3e:89:6e:d4", "network": {"id": "f4fee90a-115f-4fc6-a4d1-e1e0f188f943", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-710789360-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c406fb9a02748bfa2f24158ec5d6272", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f016d1-34a6-4ebd-81ed-a6bf9d109b87", "external-id": "nsx-vlan-transportzone-629", "segmentation_id": 629, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d2007ae-42", "ovs_interfaceid": "0d2007ae-42aa-44eb-9414-3216e1c433d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.286818] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Successfully updated port: 5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.308913] env[61898]: DEBUG nova.network.neutron [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updating instance_info_cache with network_info: [{"id": "536edbc0-179d-441d-8f00-c0a46d9589e2", "address": "fa:16:3e:dd:ed:cc", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap536edbc0-17", "ovs_interfaceid": "536edbc0-179d-441d-8f00-c0a46d9589e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 829.334937] env[61898]: DEBUG nova.network.neutron [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.343521] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240721, 'name': ReconfigVM_Task, 'duration_secs': 0.28515} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.346944] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Reconfigured VM instance instance-0000004a to attach disk [datastore1] eda63357-6749-4652-914a-dc5b69163eb6/eda63357-6749-4652-914a-dc5b69163eb6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.349516] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eede97ca-de1e-458a-bb83-14ec303a1878 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.357979] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 829.357979] env[61898]: value = "task-1240724" [ 829.357979] env[61898]: _type = "Task" [ 829.357979] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.373890] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240724, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.399371] env[61898]: DEBUG nova.network.neutron [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Updated VIF entry in instance network info cache for port 6da9af79-d8f3-454e-b392-246ae38dc236. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.399760] env[61898]: DEBUG nova.network.neutron [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Updating instance_info_cache with network_info: [{"id": "6da9af79-d8f3-454e-b392-246ae38dc236", "address": "fa:16:3e:ea:42:49", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da9af79-d8", "ovs_interfaceid": "6da9af79-d8f3-454e-b392-246ae38dc236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.443630] env[61898]: DEBUG oslo_vmware.api [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171476} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.443919] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.444108] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.444293] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.444470] env[61898]: INFO nova.compute.manager [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 829.445035] env[61898]: DEBUG oslo.service.loopingcall [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 829.445035] env[61898]: DEBUG nova.compute.manager [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 829.445195] env[61898]: DEBUG nova.network.neutron [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.494276] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52076c2b-c04b-a9c0-3994-67ddaff8fc38, 'name': SearchDatastore_Task, 'duration_secs': 0.010054} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.494276] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a88c4145-301d-4328-8309-4db143f3cb36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.501025] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 829.501025] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523a80ca-9fc2-ca59-8a39-9f76906a46e4" [ 829.501025] env[61898]: _type = "Task" [ 829.501025] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.515348] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523a80ca-9fc2-ca59-8a39-9f76906a46e4, 'name': SearchDatastore_Task, 'duration_secs': 0.01182} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.516250] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.516250] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 5fc14058-7953-4e6a-a9ef-7933d61e9f3e/5fc14058-7953-4e6a-a9ef-7933d61e9f3e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 829.516986] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff708e5a-1b81-4039-b029-29db14134965 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.528912] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 829.528912] env[61898]: value = "task-1240725" [ 829.528912] env[61898]: _type = "Task" [ 829.528912] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.539582] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.584525] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ff7871-4041-4295-af3c-27d07a7e166a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.599341] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808c60f4-f50b-4d8d-9d6f-d888922490fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.636867] env[61898]: INFO nova.compute.manager [-] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Took 1.27 seconds to deallocate network for instance. 
[ 829.642389] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8f7b2f-57be-4a55-8eb3-637857f15e16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.645127] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Received event network-vif-deleted-18c9f135-7f21-49ea-828d-27bdc6813079 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.645385] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Received event network-vif-deleted-f036aa10-aacf-4943-b51b-28b2693d3448 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.645618] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Received event network-vif-plugged-5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.645835] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Acquiring lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.646140] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.646340] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.646548] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] No waiting events found dispatching network-vif-plugged-5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.646762] env[61898]: WARNING nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Received unexpected event network-vif-plugged-5650e9db-397e-427c-903b-85817fe18e52 for instance with vm_state building and task_state spawning. 
[ 829.646973] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Received event network-changed-5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.647200] env[61898]: DEBUG nova.compute.manager [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Refreshing instance network info cache due to event network-changed-5650e9db-397e-427c-903b-85817fe18e52. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 829.647448] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Acquiring lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.647662] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Acquired lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.647827] env[61898]: DEBUG nova.network.neutron [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Refreshing network info cache for port 5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.658483] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4f4840-c6e2-48a2-a87f-f14562bf212d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.677476] env[61898]: DEBUG nova.compute.provider_tree [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.756404] env[61898]: DEBUG oslo_concurrency.lockutils [req-62271503-c89a-43ba-bbed-28212808d793 req-adf3e1c0-3dba-45e3-9110-a27c9c5c6b02 service nova] Releasing lock "refresh_cache-4c744673-0d9b-44ef-938f-372b101a2053" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.766114] env[61898]: DEBUG nova.compute.manager [req-568f8b01-4742-4628-9104-44dcb5cfc590 req-e2fc86c0-838c-4e6e-a22b-b5e06c797b67 service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Received event network-vif-deleted-6ebfe4e9-10ed-455d-bb95-26e7d9dbc197 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 829.766417] env[61898]: INFO nova.compute.manager [req-568f8b01-4742-4628-9104-44dcb5cfc590 req-e2fc86c0-838c-4e6e-a22b-b5e06c797b67 service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Neutron deleted interface 6ebfe4e9-10ed-455d-bb95-26e7d9dbc197; detaching it from the instance and deleting it from the info cache [ 829.766599] env[61898]: DEBUG nova.network.neutron [req-568f8b01-4742-4628-9104-44dcb5cfc590 req-e2fc86c0-838c-4e6e-a22b-b5e06c797b67 
service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.788832] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.812117] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.843548] env[61898]: INFO nova.compute.manager [-] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Took 2.08 seconds to deallocate network for instance. [ 829.869780] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240724, 'name': Rename_Task, 'duration_secs': 0.147671} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.870128] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.870400] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab3b9b94-8ecd-4cd7-8d58-51ee2b9d83aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.881692] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 829.881692] env[61898]: value = "task-1240726" [ 829.881692] env[61898]: _type = "Task" [ 829.881692] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.890843] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240726, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.903889] env[61898]: DEBUG oslo_concurrency.lockutils [req-e79160c7-0604-4c00-b970-0a222a136e9c req-e714afd2-ba8a-41b9-bd8d-8d049d6e9d58 service nova] Releasing lock "refresh_cache-5fc14058-7953-4e6a-a9ef-7933d61e9f3e" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.040685] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484975} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.040966] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 5fc14058-7953-4e6a-a9ef-7933d61e9f3e/5fc14058-7953-4e6a-a9ef-7933d61e9f3e.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 830.041187] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 830.041876] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eefad9e8-302c-43ab-81b3-2b9f2d723ac1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.049992] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 830.049992] env[61898]: value = "task-1240727" [ 830.049992] env[61898]: _type = "Task" [ 830.049992] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.061384] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240727, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.136314] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 830.137533] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81a735e-4775-404a-b774-4a91e5e1e014 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.146596] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 830.146909] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-755df013-6e7e-4087-b97f-e78effc3d09f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.159117] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.183392] env[61898]: DEBUG nova.scheduler.client.report [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 830.189327] env[61898]: DEBUG nova.network.neutron [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.241810] env[61898]: DEBUG nova.network.neutron [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.251217] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 830.251465] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 830.252569] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleting the datastore file [datastore2] 4db53fdf-7107-43c5-a57c-65d54b807909 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 830.252569] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a98819bf-81ff-408b-b7ec-8dd18452023f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.260096] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 830.260096] env[61898]: value = "task-1240729" [ 830.260096] env[61898]: _type = "Task" [ 830.260096] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.269438] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240729, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.269660] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9003c9bd-cbaa-43c8-9421-64f0fe9c52bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.283305] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea290176-6033-4fa9-a8c0-d8a89e5b180c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.328333] env[61898]: DEBUG nova.compute.manager [req-568f8b01-4742-4628-9104-44dcb5cfc590 req-e2fc86c0-838c-4e6e-a22b-b5e06c797b67 service nova] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Detach interface failed, port_id=6ebfe4e9-10ed-455d-bb95-26e7d9dbc197, reason: Instance 51f33e74-0bb3-488c-9a6d-d1ccc53f469b could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 830.335120] env[61898]: DEBUG nova.network.neutron [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.395298] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240726, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.413070] env[61898]: INFO nova.compute.manager [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Took 0.57 seconds to detach 1 volumes for instance. [ 830.417135] env[61898]: DEBUG nova.compute.manager [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Deleting volume: 5e4f66be-193a-428e-ae80-03e6b55967d5 {{(pid=61898) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3281}} [ 830.564080] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068276} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.564080] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 830.564080] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6a712f-30b4-4e43-acd7-53e44d8f7a07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.588226] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 5fc14058-7953-4e6a-a9ef-7933d61e9f3e/5fc14058-7953-4e6a-a9ef-7933d61e9f3e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 830.588625] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de3c3887-5755-4a9f-bb94-d0eeb614f61c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.610866] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 830.610866] env[61898]: value = "task-1240731" [ 830.610866] env[61898]: _type = "Task" [ 830.610866] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.620259] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240731, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.693011] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.028s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.695711] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.424s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.697202] env[61898]: INFO nova.compute.claims [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.723729] env[61898]: INFO nova.scheduler.client.report [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Deleted allocations for instance b709df92-bf56-40ed-ba48-a8fa19be8b68 [ 830.749304] env[61898]: INFO nova.compute.manager [-] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Took 1.30 seconds to deallocate network for instance. [ 830.773886] env[61898]: DEBUG oslo_vmware.api [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.380773} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.773886] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 830.773886] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 830.773886] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 830.809531] env[61898]: INFO nova.scheduler.client.report [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance 4db53fdf-7107-43c5-a57c-65d54b807909 [ 830.836483] env[61898]: DEBUG oslo_concurrency.lockutils [req-11a3c060-0153-4c87-ac79-01a9316220b0 req-9b9933ea-f737-4e00-9a50-2e46c619e745 service nova] Releasing lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.836943] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.837175] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.891951] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240726, 'name': PowerOnVM_Task, 'duration_secs': 0.593102} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.892291] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.892505] env[61898]: INFO nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Took 8.41 seconds to spawn the instance on the hypervisor. [ 830.892682] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 830.893543] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c14d224-5d5f-4035-8ace-3d48683b5c31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.973356] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.125365] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240731, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.234377] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6fbaec8-e4c7-4eb3-89e0-94f1966c4d13 tempest-ServerTagsTestJSON-719079621 tempest-ServerTagsTestJSON-719079621-project-member] Lock "b709df92-bf56-40ed-ba48-a8fa19be8b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.932s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.256901] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.314531] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.394034] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.415289] env[61898]: INFO nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Took 31.35 seconds to build instance. [ 831.625307] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240731, 'name': ReconfigVM_Task, 'duration_secs': 0.696661} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.626778] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 5fc14058-7953-4e6a-a9ef-7933d61e9f3e/5fc14058-7953-4e6a-a9ef-7933d61e9f3e.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 831.629899] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e48ec8d9-9f66-474d-8a87-224019c9446c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.641151] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 831.641151] env[61898]: value = "task-1240732" [ 831.641151] env[61898]: _type = "Task" [ 831.641151] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.650998] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240732, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.740046] env[61898]: DEBUG nova.network.neutron [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Updating instance_info_cache with network_info: [{"id": "5650e9db-397e-427c-903b-85817fe18e52", "address": "fa:16:3e:ae:16:3b", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5650e9db-39", "ovs_interfaceid": "5650e9db-397e-427c-903b-85817fe18e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.802835] env[61898]: DEBUG nova.compute.manager [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state 
{{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 831.803905] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d75b9-5dee-4bf1-a6d2-5927c8e7f863 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.913076] env[61898]: DEBUG nova.compute.manager [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received event network-vif-unplugged-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 831.913429] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.913772] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.914081] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.915017] env[61898]: DEBUG nova.compute.manager [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] No waiting events found dispatching network-vif-unplugged-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.915017] env[61898]: DEBUG nova.compute.manager [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received event network-vif-unplugged-536edbc0-179d-441d-8f00-c0a46d9589e2 for instance with task_state deleting. {{(pid=61898) _process_instance_event /opt/stack/nova/nova/compute/manager.py:11238}} [ 831.915017] env[61898]: DEBUG nova.compute.manager [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Received event network-changed-536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 831.915017] env[61898]: DEBUG nova.compute.manager [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Refreshing instance network info cache due to event network-changed-536edbc0-179d-441d-8f00-c0a46d9589e2. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 831.915302] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Acquiring lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.915539] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Acquired lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.915798] env[61898]: DEBUG nova.network.neutron [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Refreshing network info cache for port 536edbc0-179d-441d-8f00-c0a46d9589e2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.917718] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.867s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.055062] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2ec055-4b9e-44b5-b37d-4f35ced12315 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.063329] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec6ea16-b088-43f3-96e0-65f581fb7bea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.097146] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9d042e-70fa-4acd-8d91-e7cb28824a38 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.105592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbf3226-6001-44a1-851b-aa254941e0f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.119859] env[61898]: DEBUG nova.compute.provider_tree [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.134044] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.150927] env[61898]: DEBUG oslo_vmware.api [None 
req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240732, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.241489] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.241796] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance network_info: |[{"id": "5650e9db-397e-427c-903b-85817fe18e52", "address": "fa:16:3e:ae:16:3b", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5650e9db-39", "ovs_interfaceid": "5650e9db-397e-427c-903b-85817fe18e52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 832.242345] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:16:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5650e9db-397e-427c-903b-85817fe18e52', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.249897] env[61898]: DEBUG oslo.service.loopingcall [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.250189] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.250426] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7dfc0005-3b54-456d-bea9-27f8d1d8c75e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.271740] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.271740] env[61898]: value = "task-1240733" [ 832.271740] env[61898]: _type = "Task" [ 832.271740] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.280818] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240733, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.321070] env[61898]: INFO nova.compute.manager [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] instance snapshotting [ 832.321741] env[61898]: DEBUG nova.objects.instance [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.624833] env[61898]: DEBUG nova.scheduler.client.report [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 832.655206] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240732, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.740734] env[61898]: DEBUG nova.network.neutron [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updated VIF entry in instance network info cache for port 536edbc0-179d-441d-8f00-c0a46d9589e2. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.741198] env[61898]: DEBUG nova.network.neutron [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updating instance_info_cache with network_info: [{"id": "536edbc0-179d-441d-8f00-c0a46d9589e2", "address": "fa:16:3e:dd:ed:cc", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": null, "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap536edbc0-17", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.785623] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240733, 'name': CreateVM_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.828220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e47fc8-2a01-44a1-ae88-6ee1b40451a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.849501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f87b4c-0b30-41d9-bddb-1848cf1cc9f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.130078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.130971] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 833.134117] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.586s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.135601] env[61898]: INFO nova.compute.claims [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.154857] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240732, 'name': Rename_Task, 'duration_secs': 1.206535} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.155537] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.155796] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e236996a-e2b5-452d-ba50-982421567834 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.164613] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 833.164613] env[61898]: value = "task-1240734" [ 833.164613] env[61898]: _type = "Task" [ 833.164613] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.174325] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240734, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.244484] env[61898]: DEBUG oslo_concurrency.lockutils [req-369a5b34-a7d3-4758-9362-30da82a403d3 req-507d7f25-f10f-40dc-bb74-259549ffdeea service nova] Releasing lock "refresh_cache-4db53fdf-7107-43c5-a57c-65d54b807909" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.285791] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240733, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.363882] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 833.364427] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1a67c45f-c6b6-4e20-8d89-7f9a9e61cb1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.373786] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 833.373786] env[61898]: value = "task-1240735" [ 833.373786] env[61898]: _type = "Task" [ 833.373786] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.388534] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240735, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.640341] env[61898]: DEBUG nova.compute.utils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 833.641987] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 833.642231] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 833.677128] env[61898]: DEBUG oslo_vmware.api [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240734, 'name': PowerOnVM_Task, 'duration_secs': 0.488926} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.677407] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 833.677610] env[61898]: INFO nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Took 8.54 seconds to spawn the instance on the hypervisor. [ 833.677788] env[61898]: DEBUG nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 833.678637] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e7b64d-5348-4e60-9412-481a27141574 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.693883] env[61898]: DEBUG nova.policy [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2586563437fc4ab0a4b2802d4d01fe5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a984459656494b738b60ec791c579316', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 833.786352] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240733, 'name': CreateVM_Task, 'duration_secs': 1.03335} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.786538] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.787408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.787631] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.788055] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 833.788526] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c065779d-e3fc-49a4-a307-91f2b2568c07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.794318] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 833.794318] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ffc93f-9d9d-06a2-8489-6fc03b159fad" [ 833.794318] env[61898]: _type = "Task" [ 833.794318] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.803515] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ffc93f-9d9d-06a2-8489-6fc03b159fad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.884237] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.884534] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.884850] env[61898]: DEBUG nova.objects.instance [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'flavor' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 833.886153] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240735, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.967349] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Successfully created port: b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 834.148175] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 834.198320] env[61898]: INFO nova.compute.manager [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Took 33.09 seconds to build instance. [ 834.307796] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ffc93f-9d9d-06a2-8489-6fc03b159fad, 'name': SearchDatastore_Task, 'duration_secs': 0.012016} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.308153] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.308359] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.308799] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.308799] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.308966] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.309248] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13a45a13-0d0b-48a8-bdc2-048843c94626 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.322485] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.322722] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.323507] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e9de456-b7f3-4950-ab96-211e853baa32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.333302] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 834.333302] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522603fc-a580-181b-d2de-350eb2272cc0" [ 834.333302] env[61898]: _type = "Task" [ 834.333302] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.343509] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522603fc-a580-181b-d2de-350eb2272cc0, 'name': SearchDatastore_Task, 'duration_secs': 0.009723} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.344635] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43088f9f-0ac7-4df3-92f3-54e7c48b909f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.358025] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 834.358025] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524c4fa5-7f45-cc47-42ad-6d38097cfbe0" [ 834.358025] env[61898]: _type = "Task" [ 834.358025] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.367772] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524c4fa5-7f45-cc47-42ad-6d38097cfbe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.386277] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240735, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.389226] env[61898]: DEBUG nova.objects.instance [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'pci_requests' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.456285] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7d6b30-965c-434a-8cd6-da4d255d9d15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.464133] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342e4bcb-b550-4427-bada-049ffe4fd13e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.495970] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61a914d-a9b1-43c5-8f8b-913de8900839 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.504584] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863c2303-07cf-417e-bbb3-7bb49211c254 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.518507] env[61898]: DEBUG nova.compute.provider_tree [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.700800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-492d6cd6-aef6-4b31-ac8c-297fec717585 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.606s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.866377] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524c4fa5-7f45-cc47-42ad-6d38097cfbe0, 'name': SearchDatastore_Task, 'duration_secs': 0.012161} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.866665] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.866927] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.867213] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32299a4c-c380-4a29-ad01-e3b39a1e4318 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.874827] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 834.874827] env[61898]: value = "task-1240736" [ 834.874827] env[61898]: _type = "Task" [ 834.874827] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.889056] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.891893] env[61898]: DEBUG nova.objects.base [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 834.892119] env[61898]: DEBUG nova.network.neutron [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 834.893984] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240735, 'name': CreateSnapshot_Task, 'duration_secs': 1.039109} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.894248] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 834.895372] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755aad9a-1c00-4f12-a587-4c6166448a8b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.015488] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c60958d5-b378-460b-b7ed-6f9a6e4eb61c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.131s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.021507] env[61898]: DEBUG nova.scheduler.client.report [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 835.158210] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 835.191272] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.191669] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.191871] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.192080] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.192247] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.192425] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.192645] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.192821] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.193041] env[61898]: DEBUG nova.virt.hardware [None 
req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.193230] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.193410] env[61898]: DEBUG nova.virt.hardware [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.194356] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6680e8e8-1e00-403d-a680-9fc1671f4335 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.203743] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfbf3a1-f38a-412c-bb20-3e0ae80aab34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.386692] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463568} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.387138] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.387230] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.387503] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93d7420e-910c-4b8e-814a-f80556547618 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.396885] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 835.396885] env[61898]: value = "task-1240737" [ 835.396885] env[61898]: _type = "Task" [ 835.396885] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.407474] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240737, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.416137] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 835.416437] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c4019744-d00e-42a6-9985-0f4635e6b056 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.426130] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 835.426130] env[61898]: value = "task-1240738" [ 835.426130] env[61898]: _type = "Task" [ 835.426130] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.435944] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240738, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.526270] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.526805] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 835.529531] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.718s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.531020] env[61898]: INFO nova.compute.claims [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.662755] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Successfully updated port: b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.756263] env[61898]: DEBUG nova.compute.manager [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Received event network-vif-plugged-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 835.756476] env[61898]: DEBUG oslo_concurrency.lockutils [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] Acquiring lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.756687] env[61898]: DEBUG oslo_concurrency.lockutils [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.756863] env[61898]: DEBUG oslo_concurrency.lockutils [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.757048] env[61898]: DEBUG nova.compute.manager [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] No waiting events found dispatching network-vif-plugged-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 835.757224] env[61898]: WARNING nova.compute.manager [req-6508fe52-5738-4856-bcfc-63d8e81f11e9 req-0a9c120e-c037-4be6-a976-eefe913834d3 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Received unexpected event network-vif-plugged-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 for instance with vm_state building and task_state spawning. 
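The lock lines that bracket the event handling above ("Acquiring lock … by …", "acquired … :: waited 0.000s", "released … :: held 0.000s") are all emitted by the `inner` wrapper in oslo_concurrency/lockutils.py. Below is a minimal, illustrative sketch (not Nova source) of how a Nova-style helper produces that exact sequence using the public `lockutils.synchronized` decorator; the lock name and function are hypothetical, and the DEBUG lines only appear when debug logging is enabled for oslo_concurrency.

```python
# Illustrative sketch only -- not Nova source code. It reproduces the
# "Acquiring lock ... / acquired ... waited / released ... held" pattern
# seen in this log, with a hypothetical per-instance lock name.
from oslo_concurrency import lockutils


@lockutils.synchronized('4db53fdf-7107-43c5-a57c-65d54b807909-events')
def _pop_event():
    # The body runs with the named semaphore held; a concurrent caller
    # using the same lock name blocks at entry, which is what the
    # "waited N.NNNs" figure in the log measures.
    return None


if __name__ == '__main__':
    # Acquire ("waited") and release ("held") DEBUG records are logged
    # around this call by the lockutils wrapper.
    _pop_event()
```

The same wrapper also times the critical section, which is why longer-held locks such as "compute_resources" show non-zero "held" values (for example 2.434s) elsewhere in this log.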
[ 835.794147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "eda63357-6749-4652-914a-dc5b69163eb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.794423] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.794645] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "eda63357-6749-4652-914a-dc5b69163eb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.794827] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.794998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.797097] env[61898]: INFO nova.compute.manager [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Terminating instance [ 835.907437] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240737, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072752} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.907761] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.908668] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9a7567-4352-42c7-ad14-e5875c3ac8d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.934145] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.934527] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1863099-c97f-4247-941b-8084f83976c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.958694] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240738, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.960037] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 835.960037] env[61898]: value = "task-1240739" [ 835.960037] env[61898]: _type = "Task" [ 835.960037] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.968314] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.037205] env[61898]: DEBUG nova.compute.utils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.038601] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 836.038772] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.040729] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.040960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.041161] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.041343] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.041510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.043443] env[61898]: INFO nova.compute.manager [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Terminating instance [ 836.113288] env[61898]: DEBUG nova.policy [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a2508b1f3f945459495cef52abefedb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '819c8a7ff0aa4d7186bd859e4b56d16e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 836.167146] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.167383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.167530] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.301311] env[61898]: DEBUG nova.compute.manager [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 836.301679] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.303105] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a485ad1-1a9a-479c-a397-afc58d369177 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.313723] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.313997] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7499965-ada3-47c9-976e-2258912a07c0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.321174] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 836.321174] env[61898]: value = "task-1240740" [ 836.321174] env[61898]: _type = "Task" [ 836.321174] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.330759] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240740, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.441543] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Successfully created port: 231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.451738] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240738, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.471719] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240739, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.547332] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 836.548033] env[61898]: DEBUG nova.compute.manager [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 836.548406] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.549705] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0593d674-53d1-49ee-93fa-b832a36e1803 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.558826] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 836.559363] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49f30f1b-8858-4df6-92e9-290a3502eae6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.571241] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 836.571241] env[61898]: value = "task-1240741" [ 836.571241] env[61898]: _type = "Task" [ 836.571241] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.579840] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240741, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.738160] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.835013] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240740, 'name': PowerOffVM_Task, 'duration_secs': 0.252053} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.835363] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 836.835538] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.835808] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f40c22f3-db84-4b4c-b630-00e379ef525d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.875064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af16ecd-99ad-4677-8406-3a134daba198 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.888145] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabb332e-faab-4458-aac0-bd6a5a7ed834 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.924448] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d16a5ba-d467-4da7-885f-ad4f336be2fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.927569] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.927856] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.928105] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleting the datastore file [datastore1] eda63357-6749-4652-914a-dc5b69163eb6 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.928883] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d17bcf7-6892-4d5c-b437-802c35992a0a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.941092] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d42c26e-300b-4ddd-a28c-88443a12a61a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
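Note: the entries above follow the usual oslo.vmware invoke-and-poll pattern: a vSphere *_Task method (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is invoked, a Task managed-object reference comes back, and polling that task produces the "Waiting for the task ... to complete" and "progress is N%" lines. Below is a minimal, hypothetical sketch of that pattern using oslo.vmware directly; it is not Nova's actual code path, and the endpoint, credentials and managed-object ID are placeholders, not values from this deployment.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholder endpoint and credentials; constructing the session logs into vCenter.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Placeholder managed-object ID; in this log the refs come from Nova's own lookups.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() is what emits the "Invoking VirtualMachine.PowerOffVM_Task ..."
# request_handler line and returns a Task moref; wait_for_task() then emits the
# "Waiting for the task ... to complete" line and the periodic
# "Task: {...} progress is N%" lines until the task completes.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)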
[ 836.945581] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 836.945581] env[61898]: value = "task-1240743" [ 836.945581] env[61898]: _type = "Task" [ 836.945581] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.961341] env[61898]: DEBUG nova.compute.provider_tree [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.962641] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240738, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.963608] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.963878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.964235] env[61898]: DEBUG nova.objects.instance [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'flavor' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.975138] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240743, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.982070] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240739, 'name': ReconfigVM_Task, 'duration_secs': 0.662458} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.982070] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.982792] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32f2916e-b37a-4837-94c5-4e9267833032 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.991240] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 836.991240] env[61898]: value = "task-1240744" [ 836.991240] env[61898]: _type = "Task" [ 836.991240] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.996523] env[61898]: DEBUG nova.network.neutron [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updating instance_info_cache with network_info: [{"id": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "address": "fa:16:3e:8a:0a:5a", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fb0979-2b", "ovs_interfaceid": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.007893] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240744, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.082215] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240741, 'name': PowerOffVM_Task, 'duration_secs': 0.284645} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.082555] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 837.082768] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 837.083073] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bb0527a-8843-4ee8-b6b1-3c7f3ce8e8e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.164031] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 837.164031] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 837.164344] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleting the datastore file [datastore1] 5fc14058-7953-4e6a-a9ef-7933d61e9f3e {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 837.164380] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2faa0883-3894-4af5-a601-562bc62a2583 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.171771] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 837.171771] env[61898]: value = "task-1240746" [ 837.171771] env[61898]: _type = "Task" [ 837.171771] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.181970] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240746, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.447102] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240738, 'name': CloneVM_Task, 'duration_secs': 1.766277} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.450261] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created linked-clone VM from snapshot [ 837.451011] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e287ed-e3ab-4d1c-ba26-fb1647fa6ba2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.459191] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploading image 7d0b0872-de3d-40f0-91fd-fb21768b8b13 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 837.464454] env[61898]: DEBUG nova.scheduler.client.report [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 837.468113] env[61898]: DEBUG oslo_vmware.api [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240743, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1462} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.470245] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.470245] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.470245] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.470245] env[61898]: INFO nova.compute.manager [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Took 1.17 seconds to destroy the instance on the hypervisor. [ 837.470245] env[61898]: DEBUG oslo.service.loopingcall [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.470245] env[61898]: DEBUG nova.compute.manager [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 837.470245] env[61898]: DEBUG nova.network.neutron [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.486045] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 837.486045] env[61898]: value = "vm-267653" [ 837.486045] env[61898]: _type = "VirtualMachine" [ 837.486045] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 837.486555] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8e19235a-9220-4f02-aec5-9f891a80aa73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.496986] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease: (returnval){ [ 837.496986] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52edf039-bd08-5ff6-2413-6f4fd859e747" [ 837.496986] env[61898]: _type = "HttpNfcLease" [ 837.496986] env[61898]: } obtained for exporting VM: (result){ [ 837.496986] env[61898]: value = "vm-267653" [ 837.496986] env[61898]: _type = "VirtualMachine" [ 837.496986] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 837.497299] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the lease: (returnval){ [ 837.497299] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52edf039-bd08-5ff6-2413-6f4fd859e747" [ 837.497299] env[61898]: _type = "HttpNfcLease" [ 837.497299] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 837.503807] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.504122] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Instance network_info: |[{"id": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "address": "fa:16:3e:8a:0a:5a", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fb0979-2b", "ovs_interfaceid": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 837.504650] env[61898]: DEBUG 
oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240744, 'name': Rename_Task, 'duration_secs': 0.221947} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.504987] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:0a:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.512663] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating folder: Project (a984459656494b738b60ec791c579316). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.513304] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.515672] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b5882f1-7f06-4171-a48a-24a20ac8c443 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.517144] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe6ac206-6e37-459f-96c6-409ba0bfb859 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.521953] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 837.521953] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52edf039-bd08-5ff6-2413-6f4fd859e747" [ 837.521953] env[61898]: _type = "HttpNfcLease" [ 837.521953] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 837.522920] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 837.522920] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52edf039-bd08-5ff6-2413-6f4fd859e747" [ 837.522920] env[61898]: _type = "HttpNfcLease" [ 837.522920] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 837.523643] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54593599-4fc5-4c30-af94-8652bc9c7dc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.527434] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 837.527434] env[61898]: value = "task-1240749" [ 837.527434] env[61898]: _type = "Task" [ 837.527434] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.533561] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 837.533735] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 837.536365] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created folder: Project (a984459656494b738b60ec791c579316) in parent group-v267550. [ 837.536549] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating folder: Instances. Parent ref: group-v267654. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.537633] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8678b6bc-3689-4b47-8f5f-8ca57a6b46b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.598183] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 837.600284] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240749, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.601246] env[61898]: DEBUG nova.objects.instance [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'pci_requests' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.614806] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created folder: Instances in parent group-v267654. [ 837.615118] env[61898]: DEBUG oslo.service.loopingcall [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.615332] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.615562] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-306f9d26-4127-4882-9c2b-9eedc67c5f3a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.638459] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.638459] env[61898]: value = "task-1240751" [ 837.638459] env[61898]: _type = "Task" [ 837.638459] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.640674] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 837.640904] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 837.641075] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 837.641259] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 837.641408] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.641556] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 837.641761] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 837.641912] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 837.642091] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 837.642280] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 837.642476] env[61898]: DEBUG nova.virt.hardware [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 837.643620] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2da0712-be0e-406c-96ab-23e64e3c95b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.652074] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e86a4f39-539e-4f17-adb8-79fc3fbb5a72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.660549] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d494054-fc9f-4adb-b1d1-16a0a705a573 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.664662] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240751, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.692334] env[61898]: DEBUG oslo_vmware.api [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129251} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.692334] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 837.692334] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 837.692334] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 837.692334] env[61898]: INFO nova.compute.manager [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 837.692334] env[61898]: DEBUG oslo.service.loopingcall [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 837.692334] env[61898]: DEBUG nova.compute.manager [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 837.692334] env[61898]: DEBUG nova.network.neutron [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.895608] env[61898]: DEBUG nova.compute.manager [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Received event network-changed-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 837.896571] env[61898]: DEBUG nova.compute.manager [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Refreshing instance network info cache due to event network-changed-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 837.896571] env[61898]: DEBUG oslo_concurrency.lockutils [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] Acquiring lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.896743] env[61898]: DEBUG oslo_concurrency.lockutils [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] Acquired lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.896835] env[61898]: DEBUG nova.network.neutron [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Refreshing network info cache for port b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 837.970750] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.971158] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 837.973715] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.599s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.974099] env[61898]: DEBUG nova.objects.instance [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lazy-loading 'resources' on Instance uuid 52a584e1-61ae-447d-90e0-e15d32a96314 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.980499] env[61898]: DEBUG nova.compute.manager [req-442d3fe4-b66e-4072-a71b-9b7ba61686ae req-80199532-99a9-459b-96e8-11871b6ce1fc service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Received event network-vif-deleted-6da9af79-d8f3-454e-b392-246ae38dc236 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 837.981425] env[61898]: INFO nova.compute.manager [req-442d3fe4-b66e-4072-a71b-9b7ba61686ae req-80199532-99a9-459b-96e8-11871b6ce1fc service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Neutron deleted interface 6da9af79-d8f3-454e-b392-246ae38dc236; detaching it from the instance and deleting it from the info cache [ 837.981425] env[61898]: DEBUG nova.network.neutron [req-442d3fe4-b66e-4072-a71b-9b7ba61686ae req-80199532-99a9-459b-96e8-11871b6ce1fc service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.040095] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240749, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.105095] env[61898]: DEBUG nova.objects.base [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 838.105353] env[61898]: DEBUG nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.150312] env[61898]: DEBUG nova.policy [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.166124] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240751, 'name': CreateVM_Task, 'duration_secs': 0.411503} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.166532] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.167406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.167729] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.168470] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 838.168941] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3644d6b5-2e49-4665-9451-802c26cba964 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.175835] env[61898]: DEBUG oslo_vmware.api [None 
req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 838.175835] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52559c99-bfa8-cbef-c174-a5ac648c483f" [ 838.175835] env[61898]: _type = "Task" [ 838.175835] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.186658] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52559c99-bfa8-cbef-c174-a5ac648c483f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.281991] env[61898]: DEBUG nova.network.neutron [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.452481] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Successfully updated port: 231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.458410] env[61898]: DEBUG nova.network.neutron [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.477576] env[61898]: DEBUG nova.compute.utils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.479923] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 838.480318] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 838.489183] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23638ad3-830d-4ce1-93af-5d75004c88a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.501029] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85a65fb-c55b-4279-aaf6-f1d432aff3c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.518161] env[61898]: DEBUG nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Successfully created port: bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.547855] env[61898]: DEBUG nova.compute.manager [req-442d3fe4-b66e-4072-a71b-9b7ba61686ae req-80199532-99a9-459b-96e8-11871b6ce1fc service nova] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Detach interface failed, port_id=6da9af79-d8f3-454e-b392-246ae38dc236, reason: Instance 5fc14058-7953-4e6a-a9ef-7933d61e9f3e could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 838.556660] env[61898]: DEBUG oslo_vmware.api [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240749, 'name': PowerOnVM_Task, 'duration_secs': 0.531847} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.557599] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.557599] env[61898]: INFO nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Took 9.84 seconds to spawn the instance on the hypervisor. 
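The "Waiting for the task: (returnval){ ... } to complete", "progress is 0%" and "completed successfully" entries above all come from the same poll-until-done loop around vCenter tasks. A minimal sketch of that shape, assuming a hypothetical get_task_info() callable standing in for the property read (this is not the oslo.vmware or pyVmomi implementation):

import time

def wait_until_done(get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll a task until it reports success or error, or the timeout expires.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 40}
        if info.get('state') == 'success':
            return info  # corresponds to the "completed successfully" entries
        if info.get('state') == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # corresponds to the "... progress is N%" debug entries
        print("progress is %s%%" % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)

The recorded duration_secs values (e.g. 0.411503 for CreateVM_Task) are simply the elapsed time between the first poll and the successful one.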
[ 838.557599] env[61898]: DEBUG nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 838.558246] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26383b0c-24d4-407a-9848-e051c5df00ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.615138] env[61898]: DEBUG nova.policy [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c909f4306477d8fc741ab3aac9d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e8b71885c83418fb13e216f804ffeeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.694716] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52559c99-bfa8-cbef-c174-a5ac648c483f, 'name': SearchDatastore_Task, 'duration_secs': 0.010483} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.694996] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.695577] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.695577] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.695722] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.695884] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.698898] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1530745-5f9a-4754-b87d-757e46aef54e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.712537] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.713016] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.713944] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3ed7706-d564-4558-9524-f66b75ad9c61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.721784] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 838.721784] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523c5792-e96a-bedc-7c5c-48a39657d430" [ 838.721784] env[61898]: _type = "Task" [ 838.721784] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.731170] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523c5792-e96a-bedc-7c5c-48a39657d430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.786493] env[61898]: INFO nova.compute.manager [-] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Took 1.32 seconds to deallocate network for instance. [ 838.867065] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7466a47c-c4c1-4b36-8368-b4f893091b9f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.877239] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4964762c-505e-48f0-8ff1-a4d1a31f1517 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.884420] env[61898]: DEBUG nova.network.neutron [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updated VIF entry in instance network info cache for port b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 838.884887] env[61898]: DEBUG nova.network.neutron [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updating instance_info_cache with network_info: [{"id": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "address": "fa:16:3e:8a:0a:5a", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fb0979-2b", "ovs_interfaceid": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.923471] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5ca598-6ab3-41d5-9a6e-3af564173dd4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.932664] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a43d3c6-14ac-4179-9e86-880d67178b1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.950040] env[61898]: DEBUG nova.compute.provider_tree [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.956283] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.956408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.957781] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Building network info cache for instance {{(pid=61898) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.963138] env[61898]: INFO nova.compute.manager [-] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Took 1.27 seconds to deallocate network for instance. [ 838.987515] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 839.082149] env[61898]: INFO nova.compute.manager [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Took 33.17 seconds to build instance. [ 839.117061] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Successfully created port: b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.234585] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523c5792-e96a-bedc-7c5c-48a39657d430, 'name': SearchDatastore_Task, 'duration_secs': 0.010152} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.235670] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c48b540f-8685-44e4-a2b4-04a49ff48af5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.242547] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 839.242547] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ca2739-83be-38ae-f1b6-47afb6d45967" [ 839.242547] env[61898]: _type = "Task" [ 839.242547] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.251742] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ca2739-83be-38ae-f1b6-47afb6d45967, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.294193] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.387643] env[61898]: DEBUG oslo_concurrency.lockutils [req-c4e6726b-98aa-4efd-808e-e9cc5e530313 req-f0508358-214b-4de9-8b12-b4adaaae5899 service nova] Releasing lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.453468] env[61898]: DEBUG nova.scheduler.client.report [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 839.473843] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.508117] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 839.582930] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dba7b0a1-b8c0-4d5f-be02-f12bad56b181 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.695s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.709051] env[61898]: DEBUG nova.network.neutron [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updating instance_info_cache with network_info: [{"id": "231d39d3-2188-4318-a44d-7fbd419d0624", "address": "fa:16:3e:47:e6:6b", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap231d39d3-21", "ovs_interfaceid": "231d39d3-2188-4318-a44d-7fbd419d0624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.754650] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ca2739-83be-38ae-f1b6-47afb6d45967, 'name': SearchDatastore_Task, 'duration_secs': 0.009641} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.754750] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.754995] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/9b7b9962-fda1-46af-9ecc-ea5b352d5193.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 839.755531] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a07a012-233b-436c-9f68-a38fbeee3b59 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.764499] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 839.764499] env[61898]: value = "task-1240752" [ 839.764499] env[61898]: _type = "Task" [ 839.764499] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.775556] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.959271] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.962353] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.043s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.997087] env[61898]: INFO nova.scheduler.client.report [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted allocations for instance 52a584e1-61ae-447d-90e0-e15d32a96314 [ 840.001413] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 840.033126] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.033388] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.033543] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.033723] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.033883] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.034900] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.035250] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.036420] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.036420] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] 
Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.036420] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.036625] env[61898]: DEBUG nova.virt.hardware [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.037623] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507472a4-8266-4d8e-ac2e-08e524922752 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.053652] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecc14da-1d8d-4d3b-8213-f8b81b11b9b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.188911] env[61898]: DEBUG nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Received event network-vif-deleted-b5e10793-f18b-4c54-8373-45f9b9e9fd46 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 840.189164] env[61898]: DEBUG nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Received event network-vif-plugged-231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 840.189453] env[61898]: DEBUG oslo_concurrency.lockutils [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Acquiring lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.189565] env[61898]: DEBUG oslo_concurrency.lockutils [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.189737] env[61898]: DEBUG oslo_concurrency.lockutils [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.189986] env[61898]: DEBUG nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] No waiting events found dispatching 
network-vif-plugged-231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.190199] env[61898]: WARNING nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Received unexpected event network-vif-plugged-231d39d3-2188-4318-a44d-7fbd419d0624 for instance with vm_state building and task_state spawning. [ 840.190375] env[61898]: DEBUG nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Received event network-changed-231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 840.190659] env[61898]: DEBUG nova.compute.manager [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Refreshing instance network info cache due to event network-changed-231d39d3-2188-4318-a44d-7fbd419d0624. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 840.190730] env[61898]: DEBUG oslo_concurrency.lockutils [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Acquiring lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.209269] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.209608] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Instance network_info: |[{"id": "231d39d3-2188-4318-a44d-7fbd419d0624", "address": "fa:16:3e:47:e6:6b", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap231d39d3-21", "ovs_interfaceid": "231d39d3-2188-4318-a44d-7fbd419d0624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 840.210291] env[61898]: DEBUG oslo_concurrency.lockutils 
[req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Acquired lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.210484] env[61898]: DEBUG nova.network.neutron [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Refreshing network info cache for port 231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.211741] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:e6:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '231d39d3-2188-4318-a44d-7fbd419d0624', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.219619] env[61898]: DEBUG oslo.service.loopingcall [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.220077] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.220324] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed6f46a0-0722-4ed3-9d74-4a283088f7da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.245124] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.245124] env[61898]: value = "task-1240753" [ 840.245124] env[61898]: _type = "Task" [ 840.245124] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.257462] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240753, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.279039] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460342} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.279463] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/9b7b9962-fda1-46af-9ecc-ea5b352d5193.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 840.279911] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 840.280313] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89dbcc9b-6930-4969-a1fe-fab63a44b90b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.284760] env[61898]: DEBUG nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Successfully updated port: bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.289888] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 840.289888] env[61898]: value = "task-1240754" [ 840.289888] env[61898]: _type = "Task" [ 840.289888] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.304463] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240754, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.326039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da85bda-016e-40a0-b1b4-3634fc444eac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.334602] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7d8add-b261-4dda-a9b8-3b9caf600691 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.375391] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d418035-6078-4238-8c89-0cbdc8cd12b2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.386686] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a35364-b1ab-4398-8957-b0f7756e6e07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.403007] env[61898]: DEBUG nova.compute.provider_tree [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.508280] env[61898]: DEBUG nova.compute.manager [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-plugged-bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 840.508280] env[61898]: DEBUG oslo_concurrency.lockutils [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.508851] env[61898]: DEBUG oslo_concurrency.lockutils [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.509176] env[61898]: DEBUG oslo_concurrency.lockutils [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.509395] env[61898]: DEBUG nova.compute.manager [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] No waiting events found dispatching network-vif-plugged-bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.509574] env[61898]: WARNING 
nova.compute.manager [req-93f23ea2-aead-471f-adea-17cd46c7e339 req-0ebf6623-0265-40bd-a143-41f3e1b5b1ae service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received unexpected event network-vif-plugged-bc699656-235b-4405-92f3-966811d6a509 for instance with vm_state active and task_state None. [ 840.513889] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a5bd0a45-c3eb-43ac-a534-cc4c04d55c9c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "52a584e1-61ae-447d-90e0-e15d32a96314" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.252s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.757415] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240753, 'name': CreateVM_Task, 'duration_secs': 0.511032} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.759766] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.760529] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.760743] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.761069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.765024] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b9b496-156b-4712-878d-a3706b29db81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.767291] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 840.767291] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520a5fdd-fcbc-a70c-4e98-b914ab75a3f2" [ 840.767291] env[61898]: _type = "Task" [ 840.767291] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.776862] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520a5fdd-fcbc-a70c-4e98-b914ab75a3f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.786749] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.786916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.787166] env[61898]: DEBUG nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.801642] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240754, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079367} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.801642] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 840.804243] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9ee8fa-a0b8-4239-83f9-4d3abbce0918 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.827243] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/9b7b9962-fda1-46af-9ecc-ea5b352d5193.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.830515] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88738272-ec46-4960-9746-6f8be776ec27 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.853615] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 840.853615] env[61898]: value = "task-1240755" [ 840.853615] env[61898]: _type = "Task" [ 840.853615] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.863352] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240755, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.907255] env[61898]: DEBUG nova.scheduler.client.report [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 840.909802] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Successfully updated port: b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 841.018412] env[61898]: DEBUG nova.network.neutron [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updated VIF entry in instance network info cache for port 231d39d3-2188-4318-a44d-7fbd419d0624. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.018573] env[61898]: DEBUG nova.network.neutron [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updating instance_info_cache with network_info: [{"id": "231d39d3-2188-4318-a44d-7fbd419d0624", "address": "fa:16:3e:47:e6:6b", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap231d39d3-21", "ovs_interfaceid": "231d39d3-2188-4318-a44d-7fbd419d0624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.278653] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520a5fdd-fcbc-a70c-4e98-b914ab75a3f2, 'name': SearchDatastore_Task, 'duration_secs': 0.01037} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.278976] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.279236] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.279471] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.279621] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.280334] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.280334] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6f77d2c-dcaa-42a0-8068-fce54720a261 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.293099] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.293376] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.294460] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b0cc202-c273-4d69-94e7-31cb3306b1bf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.301573] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 841.301573] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523d63c0-57e5-ab57-0cc9-c69a86f5c238" [ 841.301573] env[61898]: _type = "Task" [ 841.301573] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.310919] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523d63c0-57e5-ab57-0cc9-c69a86f5c238, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.339534] env[61898]: WARNING nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] 89882853-88ec-48f1-a883-3be9e65f9fd8 already exists in list: networks containing: ['89882853-88ec-48f1-a883-3be9e65f9fd8']. ignoring it [ 841.364980] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240755, 'name': ReconfigVM_Task, 'duration_secs': 0.335056} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.365349] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/9b7b9962-fda1-46af-9ecc-ea5b352d5193.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 841.366334] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0fee37d-e145-4fb6-a8b0-23845301ee92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.375260] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 841.375260] env[61898]: value = "task-1240756" [ 841.375260] env[61898]: _type = "Task" [ 841.375260] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.388159] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240756, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.412551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.412721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.412918] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.414582] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.452s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.414803] env[61898]: INFO nova.compute.manager [None req-ce6d0559-31f4-4594-b718-ab8f3baa64df tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Successfully reverted task state from rebuilding on failure for instance. 
[ 841.427734] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.990s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.428788] env[61898]: DEBUG nova.objects.instance [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lazy-loading 'resources' on Instance uuid 626caecc-6389-4064-aafd-9968cee262ee {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 841.523182] env[61898]: DEBUG oslo_concurrency.lockutils [req-55eb314e-7be3-4d0b-b35e-edf31fd20946 req-b17b03a2-1a76-41c1-9661-b633b69525bf service nova] Releasing lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.558622] env[61898]: INFO nova.compute.manager [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Rebuilding instance [ 841.603940] env[61898]: DEBUG nova.compute.manager [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 841.604830] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad714bb-73da-4e19-ad78-4bf471a01a97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.718232] env[61898]: DEBUG nova.network.neutron [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}, {"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.813028] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523d63c0-57e5-ab57-0cc9-c69a86f5c238, 'name': SearchDatastore_Task, 'duration_secs': 0.020851} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.813822] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30d52246-da8d-41c6-b8b7-446c58d6579b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.819389] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 841.819389] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525aee78-655f-8d7f-d80c-bc9e3a6aa565" [ 841.819389] env[61898]: _type = "Task" [ 841.819389] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.829445] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525aee78-655f-8d7f-d80c-bc9e3a6aa565, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.885949] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240756, 'name': Rename_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.958997] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 842.103427] env[61898]: DEBUG nova.network.neutron [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Updating instance_info_cache with network_info: [{"id": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "address": "fa:16:3e:7f:91:db", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1aac51c-a2", "ovs_interfaceid": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.193658] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228ffb53-bebd-4f71-9479-fdb5e0b0781a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.202294] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84f649e-a96e-465d-9634-de3623844613 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.232325] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.232970] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.233154] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock 
"e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.234085] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e116596d-4a98-4534-9891-0be2906bdbbf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.236993] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a08206f-39cc-4094-a79d-dbf6a81d14f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.242184] env[61898]: DEBUG nova.compute.manager [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Received event network-vif-plugged-b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 842.242380] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Acquiring lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.242608] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.242783] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.242952] env[61898]: DEBUG nova.compute.manager [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] No waiting events found dispatching network-vif-plugged-b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 842.243137] env[61898]: WARNING nova.compute.manager [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Received unexpected event network-vif-plugged-b1aac51c-a20e-43a4-94eb-1aaf57b59f76 for instance with vm_state building and task_state spawning. 
[ 842.243298] env[61898]: DEBUG nova.compute.manager [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Received event network-changed-b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 842.243454] env[61898]: DEBUG nova.compute.manager [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Refreshing instance network info cache due to event network-changed-b1aac51c-a20e-43a4-94eb-1aaf57b59f76. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 842.243621] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Acquiring lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.256928] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 842.257165] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 842.257326] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.257512] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 842.257656] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.257803] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
842.258010] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 842.258183] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 842.258349] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 842.258511] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 842.258683] env[61898]: DEBUG nova.virt.hardware [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 842.264921] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfiguring VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 842.265679] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f3271b1-c829-481f-8681-b420ab5fa255 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.278592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b73429-e16b-4994-9b66-645ed7de12db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.295248] env[61898]: DEBUG nova.compute.provider_tree [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.298018] env[61898]: DEBUG oslo_vmware.api [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 842.298018] env[61898]: value = "task-1240757" [ 842.298018] env[61898]: _type = "Task" [ 842.298018] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.308367] env[61898]: DEBUG oslo_vmware.api [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240757, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.331478] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525aee78-655f-8d7f-d80c-bc9e3a6aa565, 'name': SearchDatastore_Task, 'duration_secs': 0.009605} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.331747] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.332013] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] e5c38d18-18e4-47dc-8445-71d3dc0c325a/e5c38d18-18e4-47dc-8445-71d3dc0c325a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.332286] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d03a35cc-6d1d-4ac9-871c-031884f140fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.340379] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 842.340379] env[61898]: value = "task-1240758" [ 842.340379] env[61898]: _type = "Task" [ 842.340379] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.350975] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.387958] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240756, 'name': Rename_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.542803] env[61898]: DEBUG nova.compute.manager [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-changed-bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 842.543057] env[61898]: DEBUG nova.compute.manager [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing instance network info cache due to event network-changed-bc699656-235b-4405-92f3-966811d6a509. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 842.543224] env[61898]: DEBUG oslo_concurrency.lockutils [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.543340] env[61898]: DEBUG oslo_concurrency.lockutils [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.543515] env[61898]: DEBUG nova.network.neutron [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing network info cache for port bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.606243] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.606699] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance network_info: |[{"id": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "address": "fa:16:3e:7f:91:db", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1aac51c-a2", "ovs_interfaceid": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 842.607319] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Acquired lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.607648] env[61898]: DEBUG nova.network.neutron [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Refreshing network info cache for port b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.609509] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:91:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1aac51c-a20e-43a4-94eb-1aaf57b59f76', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.620667] env[61898]: DEBUG oslo.service.loopingcall [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 842.622971] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.623655] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.624261] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7eb70fc1-b138-42c1-b0a1-5bd351aa947a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.640265] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e60fcd7-526b-4d98-962b-b55c95459091 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.651308] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 842.651308] env[61898]: value = "task-1240759" [ 842.651308] env[61898]: _type = "Task" [ 842.651308] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.656863] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.656863] env[61898]: value = "task-1240760" [ 842.656863] env[61898]: _type = "Task" [ 842.656863] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.665148] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240759, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.668697] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240760, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.800378] env[61898]: DEBUG nova.scheduler.client.report [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 842.820844] env[61898]: DEBUG oslo_vmware.api [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240757, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.853185] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240758, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.891119] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240756, 'name': Rename_Task, 'duration_secs': 1.163338} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.891536] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 842.891885] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deb4563f-8b33-4a42-a72a-9a5124910c89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.900244] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 842.900244] env[61898]: value = "task-1240761" [ 842.900244] env[61898]: _type = "Task" [ 842.900244] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.909253] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240761, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.965513] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.965747] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.174506] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240759, 'name': PowerOffVM_Task, 'duration_secs': 0.252052} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.178241] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.179629] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.179629] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240760, 'name': CreateVM_Task, 'duration_secs': 0.523175} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.179793] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf93efb3-83ca-4b08-8a90-ebc0e2721d26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.183119] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 843.183909] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.184146] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.184475] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 843.185181] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35b18de2-7777-40cb-afcb-66b5d552e553 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.190872] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.192216] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28b90d8c-ae5d-4201-9faf-a33635113fc7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.193847] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 843.193847] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525aae88-344e-6c0c-96af-54d128704548" [ 843.193847] env[61898]: _type = "Task" [ 843.193847] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.203626] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525aae88-344e-6c0c-96af-54d128704548, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.295380] env[61898]: DEBUG nova.network.neutron [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updated VIF entry in instance network info cache for port bc699656-235b-4405-92f3-966811d6a509. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.295844] env[61898]: DEBUG nova.network.neutron [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.312122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.322679] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.164s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.322951] env[61898]: DEBUG nova.objects.instance [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'resources' on Instance uuid e19e820c-154d-4e91-8631-dab9439d11a2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.324413] env[61898]: DEBUG oslo_vmware.api [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240757, 'name': ReconfigVM_Task, 'duration_secs': 0.70586} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.325085] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.325326] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfigured VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 843.351953] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240758, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580418} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.353062] env[61898]: INFO nova.scheduler.client.report [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleted allocations for instance 626caecc-6389-4064-aafd-9968cee262ee [ 843.354017] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] e5c38d18-18e4-47dc-8445-71d3dc0c325a/e5c38d18-18e4-47dc-8445-71d3dc0c325a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.354272] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.356803] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bc6fe0c-dc89-4cf2-8e6a-ea91540faad2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.365902] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 843.365902] env[61898]: value = "task-1240763" [ 843.365902] env[61898]: _type = "Task" [ 843.365902] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.375194] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.411741] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240761, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.474583] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.474737] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 843.474778] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 843.570234] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.570618] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.570661] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.570927] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e31987e5-4515-465d-8f59-9c0351957f49 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.580491] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 843.580491] env[61898]: value = "task-1240764" [ 843.580491] env[61898]: _type = "Task" [ 843.580491] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.589224] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240764, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.606512] env[61898]: DEBUG nova.network.neutron [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Updated VIF entry in instance network info cache for port b1aac51c-a20e-43a4-94eb-1aaf57b59f76. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 843.606882] env[61898]: DEBUG nova.network.neutron [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Updating instance_info_cache with network_info: [{"id": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "address": "fa:16:3e:7f:91:db", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1aac51c-a2", "ovs_interfaceid": "b1aac51c-a20e-43a4-94eb-1aaf57b59f76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.705661] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525aae88-344e-6c0c-96af-54d128704548, 'name': SearchDatastore_Task, 'duration_secs': 0.013151} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.706015] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.706282] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 843.706526] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.706674] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.706865] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 843.707155] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd9dc893-d4a9-4749-99ca-6281db8b53a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.718606] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 843.718827] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 843.719661] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cab8bac-588b-491c-9020-35eeced78ed3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.725968] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 843.725968] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5281db5e-a45c-69ab-0496-1bc125a9e910" [ 843.725968] env[61898]: _type = "Task" [ 843.725968] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.734470] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5281db5e-a45c-69ab-0496-1bc125a9e910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.799353] env[61898]: DEBUG oslo_concurrency.lockutils [req-686e8db4-8474-4d2f-9dca-5871c9d96445 req-ecc42cb9-2d68-4c90-ac2e-2dbd17bed921 service nova] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.831017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e6c82660-8f10-4c8a-bc82-96b8fb4ddcdb tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.867s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.862908] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0df28d5-c291-41be-ad6e-9d5b484456e5 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "626caecc-6389-4064-aafd-9968cee262ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.305s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.880398] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101979} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.880684] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.881472] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a712330-b3b8-433a-bc1d-971f15703777 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.906961] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] e5c38d18-18e4-47dc-8445-71d3dc0c325a/e5c38d18-18e4-47dc-8445-71d3dc0c325a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.910283] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-117f34eb-b4d5-41f5-bbfb-ce628bd5a87a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.944114] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 843.944114] env[61898]: value = "task-1240765" [ 843.944114] env[61898]: _type = "Task" [ 843.944114] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.944433] env[61898]: DEBUG oslo_vmware.api [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240761, 'name': PowerOnVM_Task, 'duration_secs': 0.682169} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.947567] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 843.947810] env[61898]: INFO nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Took 8.79 seconds to spawn the instance on the hypervisor. 
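The records above and below all follow the same oslo.vmware task pattern: a vSphere call (ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...) returns a Task managed object, and the caller blocks in wait_for_task(), which polls the task until it reaches a terminal state; the intermediate polls are the "progress is N%" records and the final poll produces the "completed successfully" records. A minimal standalone sketch of that pattern using oslo.vmware directly (not the Nova driver's own code path) is shown below; the vCenter endpoint, credentials and the 'vm-12345' moref are illustrative placeholders, not values taken from this log.

    # Minimal sketch of the oslo.vmware task-wait pattern seen in these records.
    # Endpoint, credentials and the moref value are placeholders only.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Positional args: host, username, password, api_retry_count, task_poll_interval.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password', 10, 0.5)

    # Build a managed object reference for the VM and start a vSphere task.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task object until it completes or fails;
    # each poll corresponds to a "progress is N%" line, and the final one
    # to the "completed successfully" record.
    session.wait_for_task(task)

In this log the same calls are made through the Nova driver's helpers (for example nova.virt.vmwareapi.vm_util.power_on_instance, cited in the "Powering on the VM" / "Powered on the VM" records), which wrap the session and task-wait handling shown in the sketch.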
[ 843.948000] env[61898]: DEBUG nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 843.952198] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80037f06-50ed-4343-89ca-c25c65d3a9e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.965343] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240765, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.981762] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Skipping network cache update for instance because it is being deleted. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10274}} [ 843.981953] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 843.982224] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 843.982306] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Skipping network cache update for instance because it is Building. 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 844.029095] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.029095] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.029095] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 844.029095] env[61898]: DEBUG nova.objects.instance [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lazy-loading 'info_cache' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.093899] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368591} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.094226] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.094441] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.094638] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.110924] env[61898]: DEBUG oslo_concurrency.lockutils [req-1d31c8ec-e5cc-40b7-887b-79745c0d7c59 req-e4e11452-2e5a-454c-afd7-7d670c838307 service nova] Releasing lock "refresh_cache-49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.162981] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e01d05c-a6e5-4023-b5a6-36af8a74b23e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.172590] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7240fa-4ac3-45d4-bf49-49c510d4f8ce {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.207332] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a57fe4-743c-4078-9df8-757acd1c4a88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.216202] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b578a6-dcf8-49b9-a7ef-b34e133a9ab9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.231360] env[61898]: DEBUG nova.compute.provider_tree [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.242103] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5281db5e-a45c-69ab-0496-1bc125a9e910, 'name': SearchDatastore_Task, 'duration_secs': 0.041307} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.243584] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6b739d-42af-4867-94b5-306a70ef06e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.250307] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 844.250307] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]526d149c-eb3a-0d83-d8be-94f32f22b069" [ 844.250307] env[61898]: _type = "Task" [ 844.250307] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.259354] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]526d149c-eb3a-0d83-d8be-94f32f22b069, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.459399] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240765, 'name': ReconfigVM_Task, 'duration_secs': 0.41291} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.459658] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Reconfigured VM instance instance-0000004e to attach disk [datastore2] e5c38d18-18e4-47dc-8445-71d3dc0c325a/e5c38d18-18e4-47dc-8445-71d3dc0c325a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.460445] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc0036da-e5f2-40b4-8132-4703e8f18fe7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.470136] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 844.470136] env[61898]: value = "task-1240766" [ 844.470136] env[61898]: _type = "Task" [ 844.470136] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.479993] env[61898]: INFO nova.compute.manager [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Took 24.23 seconds to build instance. [ 844.484710] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240766, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.643176] env[61898]: DEBUG nova.objects.instance [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lazy-loading 'flavor' on Instance uuid d6c96dce-13ae-411a-b52a-fee484718a8a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.736815] env[61898]: DEBUG nova.scheduler.client.report [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 844.762160] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]526d149c-eb3a-0d83-d8be-94f32f22b069, 'name': SearchDatastore_Task, 'duration_secs': 0.018001} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.762848] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.763008] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5/49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 844.763290] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-286714aa-d352-4b27-b5e6-cb35f28c6d60 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.772234] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 844.772234] env[61898]: value = "task-1240767" [ 844.772234] env[61898]: _type = "Task" [ 844.772234] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.782212] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240767, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.986301] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240766, 'name': Rename_Task, 'duration_secs': 0.213071} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.986784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-754ce2ef-536d-4a88-8c13-1ee039958870 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.747s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.987085] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.987403] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90e5763a-c3e1-4526-a9ab-c0b0dd2f6a7e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.995384] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 844.995384] env[61898]: value = "task-1240768" [ 844.995384] env[61898]: _type = "Task" [ 844.995384] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.004555] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240768, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.138669] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.139598] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.140429] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.140429] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.140429] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.140606] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.140935] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.141163] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.141381] env[61898]: 
DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.141589] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.141834] env[61898]: DEBUG nova.virt.hardware [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.142907] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b330cc5-d939-451a-983b-66c74ad73869 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.148667] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.149148] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.157968] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3918ddf9-68ff-4895-8b09-05707659f8a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.177500] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:16:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5650e9db-397e-427c-903b-85817fe18e52', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.185960] env[61898]: DEBUG oslo.service.loopingcall [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.186929] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.187349] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-959bd317-9e5c-4b40-bbf7-760cbc07925c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.221175] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.221175] env[61898]: value = "task-1240769" [ 845.221175] env[61898]: _type = "Task" [ 845.221175] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.234740] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240769, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.242337] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.245338] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.272s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.245829] env[61898]: DEBUG nova.objects.instance [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lazy-loading 'resources' on Instance uuid 466cbf07-e945-48d4-a103-5a3ea2b7adf6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.278294] env[61898]: INFO nova.scheduler.client.report [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted allocations for instance e19e820c-154d-4e91-8631-dab9439d11a2 [ 845.290812] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240767, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.440583] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "4c744673-0d9b-44ef-938f-372b101a2053" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.440878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.441171] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "4c744673-0d9b-44ef-938f-372b101a2053-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.441399] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.441590] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.444523] env[61898]: INFO nova.compute.manager [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Terminating instance [ 845.508070] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240768, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.719142] env[61898]: INFO nova.compute.manager [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Rescuing [ 845.719142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.719142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.719142] env[61898]: DEBUG nova.network.neutron [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.739969] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240769, 'name': CreateVM_Task, 'duration_secs': 0.434931} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.740367] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.742523] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.742964] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.743614] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.745023] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8e32420-494f-4023-be39-c0d7040f5cfd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.754178] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f 
tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 845.754178] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5273e63d-438f-9461-c1a1-37b172053a83" [ 845.754178] env[61898]: _type = "Task" [ 845.754178] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.768572] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5273e63d-438f-9461-c1a1-37b172053a83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.787249] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698677} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.790366] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5/49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.791230] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.794571] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-780c8810-ead3-486e-9cec-30fce99f465c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.797142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4838d97d-7572-4da4-af57-f351ae73cb0b tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "e19e820c-154d-4e91-8631-dab9439d11a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.065s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.805324] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 845.805324] env[61898]: value = "task-1240770" [ 845.805324] env[61898]: _type = "Task" [ 845.805324] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.821192] env[61898]: DEBUG nova.network.neutron [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.824129] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240770, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.856189] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-8658c19e-7e0e-473b-a26d-7bb0da23b75f" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.856467] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-8658c19e-7e0e-473b-a26d-7bb0da23b75f" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.856829] env[61898]: DEBUG nova.objects.instance [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'flavor' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.861395] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.949487] env[61898]: DEBUG nova.compute.manager [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 845.949723] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 845.950641] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1f73320-4006-415c-917a-562bbdf2cb41 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.961632] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 845.961938] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6d211c8-0ddf-449c-a4c7-997326653239 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.968851] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 845.968851] env[61898]: value = "task-1240771" [ 845.968851] env[61898]: _type = "Task" [ 845.968851] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.977332] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.009902] env[61898]: DEBUG oslo_vmware.api [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240768, 'name': PowerOnVM_Task, 'duration_secs': 0.724382} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.010254] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.010488] env[61898]: INFO nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Took 8.41 seconds to spawn the instance on the hypervisor. [ 846.010670] env[61898]: DEBUG nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 846.011659] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a289dfd-0f8b-4e4f-be75-c37235b1ac1d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.033610] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e93cf38-ee15-4646-9c20-5ee535556e03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.041846] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba60801a-1eb6-4bfb-b924-8919299a6c31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.078359] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa69acb-c7e6-41ab-a3bb-3dbd50123778 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.088888] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78817038-58fc-4f85-833b-e46ae03fe942 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.094370] env[61898]: DEBUG nova.compute.manager [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-changed-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 846.094579] env[61898]: DEBUG nova.compute.manager [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing instance network info cache due to event network-changed-93f00603-54ee-451f-9579-32f82d4923b0. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 846.094785] env[61898]: DEBUG oslo_concurrency.lockutils [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.107765] env[61898]: DEBUG nova.compute.provider_tree [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.266480] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5273e63d-438f-9461-c1a1-37b172053a83, 'name': SearchDatastore_Task, 'duration_secs': 0.029671} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.267294] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.267294] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.267511] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.267680] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.268075] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.268412] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-148bbe2e-3598-422c-a92d-ac32a72bb469 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.278679] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.278920] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.279751] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63821fb9-9572-4d63-83ec-d663d9f33fb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.286358] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 846.286358] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52fe8b7c-dce5-73d2-1605-b0246ddf163e" [ 846.286358] env[61898]: _type = "Task" [ 846.286358] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.295244] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fe8b7c-dce5-73d2-1605-b0246ddf163e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.315907] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240770, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077205} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.316250] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.317100] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ea313e-55eb-4426-93c9-2998cc38f2bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.345499] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5/49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.348937] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac4327cb-7316-43a4-ac0b-74039a660072 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.368049] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.368325] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 846.368878] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.369671] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.369876] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.370084] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.370222] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.370397] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.370578] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 846.370854] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 846.377603] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 846.377603] env[61898]: value = "task-1240772" [ 846.377603] env[61898]: _type = "Task" [ 846.377603] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.392458] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240772, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.187152] env[61898]: DEBUG nova.network.neutron [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updating instance_info_cache with network_info: [{"id": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "address": "fa:16:3e:8a:0a:5a", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb7fb0979-2b", "ovs_interfaceid": "b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.194573] env[61898]: DEBUG nova.scheduler.client.report [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based 
on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 847.195892] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.197262] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Acquiring lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.197262] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Acquired lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.197565] env[61898]: DEBUG nova.network.neutron [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 847.208861] env[61898]: INFO nova.compute.manager [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Took 25.68 seconds to build instance. [ 847.215599] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240771, 'name': PowerOffVM_Task, 'duration_secs': 0.306722} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.215814] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.215985] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.216483] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c754b8cd-62cb-43c0-9abc-590db6ebc133 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.227160] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240772, 'name': ReconfigVM_Task, 'duration_secs': 0.350687} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.227727] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fe8b7c-dce5-73d2-1605-b0246ddf163e, 'name': SearchDatastore_Task, 'duration_secs': 0.016103} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.229143] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5/49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 847.230090] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12c6f111-621e-481d-8141-7db59a60be6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.232347] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e044472-1d49-406d-8737-0b013ac33b46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.243598] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 847.243598] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b9b953-9efe-9af6-a49f-c11cb7ae0026" [ 847.243598] env[61898]: _type = "Task" [ 847.243598] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.244234] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 847.244234] env[61898]: value = "task-1240774" [ 847.244234] env[61898]: _type = "Task" [ 847.244234] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.260085] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240774, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.265155] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b9b953-9efe-9af6-a49f-c11cb7ae0026, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.309498] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.309747] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.310015] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleting the datastore file [datastore1] 4c744673-0d9b-44ef-938f-372b101a2053 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.310339] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03731054-fc21-46f5-907f-c239b40b3270 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.319873] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for the task: (returnval){ [ 847.319873] env[61898]: value = "task-1240775" [ 847.319873] env[61898]: _type = "Task" [ 847.319873] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.328990] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.344142] env[61898]: DEBUG nova.objects.instance [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'pci_requests' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.510945] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 847.511870] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9284cd-2860-4f5d-a278-7b1934a81edb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.519243] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 847.519412] env[61898]: ERROR oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk due to incomplete transfer. [ 847.519641] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-16929cc4-77cf-46fe-8edf-9d252a7c725b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.527657] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/520bd827-47b6-a74a-1cad-6f4036ff669e/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 847.527890] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploaded image 7d0b0872-de3d-40f0-91fd-fb21768b8b13 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 847.530368] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 847.530626] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-eca23864-aae3-4a0c-9251-09f70306fce1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.537249] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 847.537249] env[61898]: value = "task-1240776" [ 847.537249] env[61898]: _type = "Task" [ 847.537249] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.546442] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240776, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.547561] env[61898]: DEBUG nova.network.neutron [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.699920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "refresh_cache-9b7b9962-fda1-46af-9ecc-ea5b352d5193" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.702795] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.458s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.706185] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.448s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.706185] env[61898]: DEBUG nova.objects.instance [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid 51f33e74-0bb3-488c-9a6d-d1ccc53f469b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.715274] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9f8a78ca-e3f7-4458-9d2c-efec2f3e7d80 
tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 27.203s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.758138] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240774, 'name': Rename_Task, 'duration_secs': 0.249947} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.761961] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.762658] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b9b953-9efe-9af6-a49f-c11cb7ae0026, 'name': SearchDatastore_Task, 'duration_secs': 0.022303} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.762658] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78ec100f-3d59-41e9-8c78-38c0d4238c23 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.764251] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.764515] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.764765] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ab0a271-33f8-43b2-8f86-5e2dfac67726 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.775345] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 847.775345] env[61898]: value = "task-1240778" [ 847.775345] env[61898]: _type = "Task" [ 847.775345] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.775608] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 847.775608] env[61898]: value = "task-1240777" [ 847.775608] env[61898]: _type = "Task" [ 847.775608] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.787463] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240778, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.791191] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.830356] env[61898]: DEBUG oslo_vmware.api [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Task: {'id': task-1240775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.448186} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.830613] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.830805] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 847.831063] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 847.831280] env[61898]: INFO nova.compute.manager [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Took 1.88 seconds to destroy the instance on the hypervisor. [ 847.831535] env[61898]: DEBUG oslo.service.loopingcall [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.831746] env[61898]: DEBUG nova.compute.manager [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 847.831911] env[61898]: DEBUG nova.network.neutron [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 847.847129] env[61898]: DEBUG nova.objects.base [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 847.847373] env[61898]: DEBUG nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.915955] env[61898]: DEBUG nova.policy [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 848.047461] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240776, 'name': Destroy_Task} progress is 33%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.050193] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.050441] env[61898]: DEBUG nova.compute.manager [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Inject network info {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 848.050676] env[61898]: DEBUG nova.compute.manager [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] network_info to inject: |[{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 848.055483] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfiguring VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 848.055793] env[61898]: DEBUG oslo_concurrency.lockutils [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.055968] env[61898]: DEBUG nova.network.neutron [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing network info cache for port 
93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.057159] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f3323c7-1f01-4621-9f83-5fcf88498875 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.077738] env[61898]: DEBUG oslo_vmware.api [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 848.077738] env[61898]: value = "task-1240779" [ 848.077738] env[61898]: _type = "Task" [ 848.077738] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.086986] env[61898]: DEBUG oslo_vmware.api [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240779, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.199266] env[61898]: DEBUG nova.network.neutron [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updating instance_info_cache with network_info: [{"id": "231d39d3-2188-4318-a44d-7fbd419d0624", "address": "fa:16:3e:47:e6:6b", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap231d39d3-21", "ovs_interfaceid": "231d39d3-2188-4318-a44d-7fbd419d0624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.230122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6def712b-263a-4780-ab6c-17ad90955f99 tempest-ServerActionsV293TestJSON-828892759 tempest-ServerActionsV293TestJSON-828892759-project-member] Lock "466cbf07-e945-48d4-a103-5a3ea2b7adf6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 21.112s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.298356] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240778, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.306502] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240777, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.549517] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240776, 'name': Destroy_Task, 'duration_secs': 0.782221} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.549798] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroyed the VM [ 848.550075] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 848.550375] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2458c587-0e0a-4c33-bd1b-50dc94fc5458 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.559224] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 848.559224] env[61898]: value = "task-1240780" [ 848.559224] env[61898]: _type = "Task" [ 848.559224] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.562454] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607123b6-ee42-4a46-b511-4e5cc419fc2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.574187] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240780, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.576455] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e705e19-b18f-4bfe-b95b-468831f3886e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.588602] env[61898]: DEBUG oslo_vmware.api [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240779, 'name': ReconfigVM_Task, 'duration_secs': 0.203069} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.614322] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd696aa5-7763-4897-96ef-5ce275401ba5 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfigured VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 848.618221] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8394fd-8fb3-4e74-ad79-e2633304c0b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.627760] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a04d008-a3fc-4f2b-a20b-ee21430681fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.647058] env[61898]: DEBUG nova.compute.provider_tree [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.701709] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Releasing lock "refresh_cache-e5c38d18-18e4-47dc-8445-71d3dc0c325a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.702080] env[61898]: DEBUG nova.compute.manager [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Inject network info {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 848.702250] env[61898]: DEBUG nova.compute.manager [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] network_info to inject: |[{"id": "231d39d3-2188-4318-a44d-7fbd419d0624", "address": "fa:16:3e:47:e6:6b", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap231d39d3-21", "ovs_interfaceid": "231d39d3-2188-4318-a44d-7fbd419d0624", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 848.707374] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Reconfiguring VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 848.707667] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c61a73f0-b1e2-49a5-af26-9674ab141989 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.725481] env[61898]: DEBUG oslo_vmware.api [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Waiting for the task: (returnval){ [ 848.725481] env[61898]: value = "task-1240781" [ 848.725481] env[61898]: _type = "Task" [ 848.725481] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.735609] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.736064] env[61898]: DEBUG oslo_vmware.api [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Task: {'id': task-1240781, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.736900] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e0f0c1e-7000-470b-960c-613344c3daaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.744549] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 848.744549] env[61898]: value = "task-1240782" [ 848.744549] env[61898]: _type = "Task" [ 848.744549] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.757498] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.791470] env[61898]: DEBUG oslo_vmware.api [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240777, 'name': PowerOnVM_Task, 'duration_secs': 0.572102} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.794823] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 848.795069] env[61898]: INFO nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Took 8.80 seconds to spawn the instance on the hypervisor. [ 848.795259] env[61898]: DEBUG nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 848.795567] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240778, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.76136} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.796308] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72a336e-f807-4e41-85a5-63882bf4ec6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.798873] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.799080] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.799324] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7795f48d-adbb-42f2-88a7-dc9393189661 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.812692] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 848.812692] env[61898]: value = "task-1240783" [ 848.812692] env[61898]: _type = "Task" [ 848.812692] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.822744] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240783, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.852692] env[61898]: DEBUG nova.network.neutron [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.068925] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240780, 'name': RemoveSnapshot_Task} progress is 58%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.153563] env[61898]: DEBUG nova.scheduler.client.report [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 849.239022] env[61898]: DEBUG oslo_vmware.api [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] Task: {'id': task-1240781, 'name': ReconfigVM_Task, 'duration_secs': 0.203212} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.239022] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-dbbefc19-d922-41b7-84b4-28ef8089e131 tempest-ServersAdminTestJSON-1880916979 tempest-ServersAdminTestJSON-1880916979-project-admin] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Reconfigured VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 849.255405] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240782, 'name': PowerOffVM_Task, 'duration_secs': 0.322118} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.256021] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.256893] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903593c0-0ded-47e2-9821-6f90b1696a4d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.278675] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f87f2a-2fac-447c-8a1e-57fac3acc0bb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.319801] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.320708] env[61898]: DEBUG nova.network.neutron [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updated VIF entry in instance network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.324710] env[61898]: DEBUG nova.network.neutron [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.330064] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-703dafb4-8813-4a0e-9b01-a762ebf29667 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.334600] env[61898]: INFO nova.compute.manager [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Took 25.56 seconds to build instance. [ 849.340788] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240783, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076202} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.344264] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.344918] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 849.344918] env[61898]: value = "task-1240784" [ 849.344918] env[61898]: _type = "Task" [ 849.344918] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.345630] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90737a15-a773-4e63-877b-c2cb34b9ab0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.358237] env[61898]: INFO nova.compute.manager [-] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Took 1.53 seconds to deallocate network for instance. 
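Most of the traffic above follows oslo.vmware's task pattern: a *_Task call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOffVM_Task, ...) returns a task handle, the caller logs "Waiting for the task", and wait_for_task/_poll_task re-reads the task state until it is terminal, logging "progress is N%" along the way. The sketch below is a minimal illustration of that poll loop; read_task_state and the interval are hypothetical stand-ins, not oslo.vmware's implementation:

    import time

    def wait_for_task(read_task_state, task_ref, interval=0.5):
        """Poll a VMware task until it reaches a terminal vim25 state.

        read_task_state(task_ref) is a hypothetical callable returning one of
        'queued', 'running', 'success' or 'error' for the given task ref
        (e.g. 'task-1240783' above).
        """
        while True:
            state = read_task_state(task_ref)
            if state == 'success':
                return task_ref          # logged as "completed successfully"
            if state == 'error':
                raise RuntimeError('task %s failed' % task_ref)
            time.sleep(interval)         # the "progress is N%" polls in between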
[ 849.370762] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 849.371070] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.371488] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.371629] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.371702] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.381080] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.384709] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17d88a88-c943-447c-9a4c-abd703031707 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.387589] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3476628-85d1-4e03-bfcc-d2e7df2ec433 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.410940] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 849.410940] env[61898]: value = "task-1240785" [ 849.410940] env[61898]: _type = "Task" [ 849.410940] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.415413] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.415611] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.417122] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-921afb3d-6690-4224-a3ff-e6aa02a3d4aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.426197] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240785, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.426609] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 849.426609] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527e8806-7de6-4c71-2d42-bd432cc5b846" [ 849.426609] env[61898]: _type = "Task" [ 849.426609] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.438513] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e8806-7de6-4c71-2d42-bd432cc5b846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.521970] env[61898]: DEBUG nova.compute.manager [req-47c139de-0ddc-4dbd-bb25-d5a39b641359 req-af1fda4c-411a-402f-a6b1-0ec80c824d6b service nova] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Received event network-vif-deleted-0d2007ae-42aa-44eb-9414-3216e1c433d4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 849.548113] env[61898]: DEBUG nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Successfully updated port: 8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.570336] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240780, 'name': RemoveSnapshot_Task} progress is 58%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.660330] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.662925] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.348s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.663290] env[61898]: DEBUG nova.objects.instance [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'resources' on Instance uuid 4db53fdf-7107-43c5-a57c-65d54b807909 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.683107] env[61898]: INFO nova.scheduler.client.report [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance 51f33e74-0bb3-488c-9a6d-d1ccc53f469b [ 849.833170] env[61898]: DEBUG oslo_concurrency.lockutils [req-21255833-a3cb-4e88-86be-94be0424747a req-264ae557-133a-4a95-8012-bb5cbf6a74ce service nova] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.837179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-085e5729-33c7-4e8b-8e36-12f0d270dc0c tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.087s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.903876] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.921728] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.937067] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527e8806-7de6-4c71-2d42-bd432cc5b846, 'name': SearchDatastore_Task, 'duration_secs': 0.014928} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.937914] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb00563-f73b-475f-a381-9710c739e13d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.944285] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 849.944285] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ad050c-051b-8df8-2e4d-25f1ca84f36c" [ 849.944285] env[61898]: _type = "Task" [ 849.944285] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.952465] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ad050c-051b-8df8-2e4d-25f1ca84f36c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.050684] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.050990] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.051205] env[61898]: DEBUG nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.060620] env[61898]: DEBUG nova.objects.instance [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lazy-loading 'flavor' on Instance uuid d6c96dce-13ae-411a-b52a-fee484718a8a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.073051] env[61898]: DEBUG oslo_vmware.api [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240780, 'name': RemoveSnapshot_Task, 'duration_secs': 1.262227} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.073315] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 850.073551] env[61898]: INFO nova.compute.manager [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 17.25 seconds to snapshot the instance on the hypervisor. [ 850.167585] env[61898]: DEBUG nova.objects.instance [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'numa_topology' on Instance uuid 4db53fdf-7107-43c5-a57c-65d54b807909 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.175464] env[61898]: DEBUG nova.compute.manager [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-plugged-8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 850.175581] env[61898]: DEBUG oslo_concurrency.lockutils [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.175804] env[61898]: DEBUG oslo_concurrency.lockutils [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.175943] env[61898]: DEBUG oslo_concurrency.lockutils [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.176162] env[61898]: DEBUG nova.compute.manager [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] No waiting events found dispatching network-vif-plugged-8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 850.176299] env[61898]: WARNING nova.compute.manager [req-c29f1184-5bd1-4aa5-8cac-d8e56b6a162e req-c58b4ab1-7a12-4a5d-8ac8-790ce43ae65f service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received unexpected event network-vif-plugged-8658c19e-7e0e-473b-a26d-7bb0da23b75f for instance with vm_state active and task_state None. 
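The Acquiring / acquired / "released" lines around the "compute_resources" and "e851d73d-...-events" locks all come from oslo.concurrency's lockutils wrappers. A minimal sketch of the decorator form that produces this logging; the function body and tracker object are illustrative, not Nova's ResourceTracker:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(tracker, instance):
        # Runs with the in-process "compute_resources" lock held, producing
        # the Acquiring / acquired (waited Ns) / "released" (held Ns) DEBUG
        # lines seen above. The refresh_cache-* entries use the
        # lockutils.lock() context-manager form rather than the decorator.
        tracker.record(instance)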
[ 850.189344] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2a4a2b3d-cdf8-406a-9b3e-a0d58fa3f9b0 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "51f33e74-0bb3-488c-9a6d-d1ccc53f469b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.378s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.422363] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240785, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.456327] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ad050c-051b-8df8-2e4d-25f1ca84f36c, 'name': SearchDatastore_Task, 'duration_secs': 0.009596} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.456599] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.456860] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. {{(pid=61898) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 850.457154] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81763e76-54bc-44e8-b51b-0907d1bf15ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.464490] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 850.464490] env[61898]: value = "task-1240786" [ 850.464490] env[61898]: _type = "Task" [ 850.464490] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.473376] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240786, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.566801] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.567026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.593779] env[61898]: WARNING nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] 89882853-88ec-48f1-a883-3be9e65f9fd8 already exists in list: networks containing: ['89882853-88ec-48f1-a883-3be9e65f9fd8']. ignoring it [ 850.594061] env[61898]: WARNING nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] 89882853-88ec-48f1-a883-3be9e65f9fd8 already exists in list: networks containing: ['89882853-88ec-48f1-a883-3be9e65f9fd8']. ignoring it [ 850.633305] env[61898]: DEBUG nova.compute.manager [None req-9b62e2a5-9132-4232-bb72-c043f23b971c tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Found 1 images (rotation: 2) {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 850.671185] env[61898]: DEBUG nova.objects.base [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Object Instance<4db53fdf-7107-43c5-a57c-65d54b807909> lazy-loaded attributes: resources,numa_topology {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 850.840563] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ebb99d-7fee-4462-88ac-9da50dede867 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.848344] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Suspending the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 850.848719] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-178d3cd9-ddb3-4ce9-beea-b1eaceb501c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.862562] env[61898]: DEBUG oslo_vmware.api [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 850.862562] env[61898]: value = "task-1240787" [ 850.862562] env[61898]: _type = "Task" [ 
850.862562] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.873082] env[61898]: DEBUG oslo_vmware.api [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240787, 'name': SuspendVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.930954] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240785, 'name': ReconfigVM_Task, 'duration_secs': 1.045004} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.930954] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d/43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.931707] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34c0f9ae-59ce-42b9-96d5-87dcb2ef16a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.940558] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 850.940558] env[61898]: value = "task-1240788" [ 850.940558] env[61898]: _type = "Task" [ 850.940558] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.954357] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240788, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.976176] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240786, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460289} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.977306] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. 
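The rescue flow above copies the cached image vmdk ([datastore1] devstack-image-cache_base/<image>/<image>.vmdk) into the instance folder as <image>-rescue.vmdk before attaching it. The helpers below only mirror that path layout as plain strings; Nova's driver builds these as datastore-path objects, so treat this as an illustration under that assumption:

    def cached_image_vmdk(datastore, image_id):
        # e.g. '[datastore1] devstack-image-cache_base/e07a.../e07a....vmdk'
        return '[%s] devstack-image-cache_base/%s/%s.vmdk' % (
            datastore, image_id, image_id)

    def rescue_vmdk(datastore, instance_uuid, image_id):
        # e.g. '[datastore1] 9b7b9962-.../e07a6c11-...-rescue.vmdk'
        return '[%s] %s/%s-rescue.vmdk' % (datastore, instance_uuid, image_id)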
[ 850.978331] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ff73fa-a16c-46c6-8b1c-2cded8346ca7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.984822] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe49d61-8ec1-4e6d-8b9e-e2be15f6b9e2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.009558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32c7e73-3d8e-4b1e-afa1-6e513fb5b466 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.022091] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.023812] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b0f5209-0042-473c-9def-59574f5af9fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.073832] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 851.073832] env[61898]: value = "task-1240789" [ 851.073832] env[61898]: _type = "Task" [ 851.073832] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.077884] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23720263-1b77-4c78-b43b-66b2c6c64e9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.095052] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb3fd93-f226-4bea-b9bf-b6282f4a71a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.099422] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240789, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.112201] env[61898]: DEBUG nova.compute.provider_tree [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.114141] env[61898]: DEBUG nova.network.neutron [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 851.196114] env[61898]: DEBUG nova.network.neutron [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "address": "fa:16:3e:63:cc:4a", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8658c19e-7e", "ovs_interfaceid": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.373855] env[61898]: DEBUG oslo_vmware.api [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240787, 'name': SuspendVM_Task} progress is 58%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.451032] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240788, 'name': Rename_Task, 'duration_secs': 0.17692} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.451378] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.451624] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e828807c-c92b-4ca0-92c8-d46a18934ae0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.458837] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 851.458837] env[61898]: value = "task-1240790" [ 851.458837] env[61898]: _type = "Task" [ 851.458837] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.467370] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240790, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.588945] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240789, 'name': ReconfigVM_Task, 'duration_secs': 0.349} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.589324] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.590212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bf6f76-a8a2-4b47-81a4-fdf6fe7235a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.617265] env[61898]: DEBUG nova.scheduler.client.report [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 851.622558] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dd344c4-2551-4629-aa05-cdb0cd66f376 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.641271] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 851.641271] env[61898]: value = "task-1240791" [ 851.641271] env[61898]: _type = "Task" [ 851.641271] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.651591] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240791, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.700062] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.700597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.701191] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.702170] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9592e5c5-db15-4f75-89f3-dc666eebbbe4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.728257] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.728619] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.728857] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.729147] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.729371] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 851.729589] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.729951] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.730217] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.730472] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.730725] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.731040] env[61898]: DEBUG nova.virt.hardware [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.741551] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfiguring VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 851.745848] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a329315a-3172-4bb5-ac12-2ab96b793c87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.773673] env[61898]: DEBUG oslo_vmware.api [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 851.773673] env[61898]: value = "task-1240792" [ 851.773673] env[61898]: _type = "Task" [ 851.773673] env[61898]: } to complete. 
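The nova.virt.hardware entries just above walk the CPU-topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only factorization is sockets=1, cores=1, threads=1. A toy enumeration of that search, illustrating the idea rather than reproducing nova.virt.hardware:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate sockets*cores*threads factorizations of the vCPU count
        # that respect the limits; for vcpus=1 this is just [(1, 1, 1)].
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    # possible_topologies(1) -> [(1, 1, 1)], matching "Possible topologies
    # [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.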
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.783595] env[61898]: DEBUG oslo_vmware.api [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240792, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.875102] env[61898]: DEBUG oslo_vmware.api [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240787, 'name': SuspendVM_Task} progress is 58%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.944645] env[61898]: DEBUG nova.network.neutron [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.971186] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240790, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.135070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.472s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.138328] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.844s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.138637] env[61898]: DEBUG nova.objects.instance [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lazy-loading 'resources' on Instance uuid eda63357-6749-4652-914a-dc5b69163eb6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 852.153164] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240791, 'name': ReconfigVM_Task, 'duration_secs': 0.271076} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.153824] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.154199] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9977d86-c73a-40f0-8134-0c3c102c9098 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.163046] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 852.163046] env[61898]: value = "task-1240793" [ 852.163046] env[61898]: _type = "Task" [ 852.163046] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.175872] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.176219] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.176453] env[61898]: INFO nova.compute.manager [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Shelving [ 852.178118] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.285113] env[61898]: DEBUG oslo_vmware.api [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240792, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.324204] env[61898]: DEBUG nova.compute.manager [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-changed-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.324204] env[61898]: DEBUG nova.compute.manager [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing instance network info cache due to event network-changed-93f00603-54ee-451f-9579-32f82d4923b0. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 852.324204] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] Acquiring lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.375640] env[61898]: DEBUG oslo_vmware.api [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240787, 'name': SuspendVM_Task, 'duration_secs': 1.465119} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.375916] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Suspended the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 852.376113] env[61898]: DEBUG nova.compute.manager [None req-821948f4-64f5-4b40-8550-0a3e71ac3beb tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 852.377024] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb299eef-1914-4881-b147-95faf18d187b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.447760] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.448079] env[61898]: DEBUG nova.compute.manager [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Inject network info {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7548}} [ 852.448414] env[61898]: DEBUG nova.compute.manager [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] network_info to inject: |[{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 852.453890] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6afadc12-7517-45aa-8b64-81394cf209a3 
tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfiguring VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 852.454349] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] Acquired lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.454578] env[61898]: DEBUG nova.network.neutron [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Refreshing network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.456812] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05dbbacb-874d-4837-aaad-981d428d0bb8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.491760] env[61898]: DEBUG oslo_vmware.api [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 852.491760] env[61898]: value = "task-1240794" [ 852.491760] env[61898]: _type = "Task" [ 852.491760] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.496515] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240790, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.508682] env[61898]: DEBUG oslo_vmware.api [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240794, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.651412] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e5aff4b-74a2-4a2a-9f68-ac1e447036ff tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 44.066s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.652530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 20.519s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.653172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.653172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.653172] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.658360] env[61898]: INFO nova.compute.manager [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Terminating instance [ 852.678842] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240793, 'name': PowerOnVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.767899] env[61898]: DEBUG nova.network.neutron [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updated VIF entry in instance network info cache for port 93f00603-54ee-451f-9579-32f82d4923b0. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 852.768369] env[61898]: DEBUG nova.network.neutron [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [{"id": "93f00603-54ee-451f-9579-32f82d4923b0", "address": "fa:16:3e:45:b7:24", "network": {"id": "48f12b71-35b5-474e-bfdb-7ef68e2d7b22", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-66759866-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f368913c359420cbd16ef48aa83e27c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f4a795c-8718-4a7c-aafe-9da231df10f8", "external-id": "nsx-vlan-transportzone-162", "segmentation_id": 162, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f00603-54", "ovs_interfaceid": "93f00603-54ee-451f-9579-32f82d4923b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.784808] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.785039] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.794653] env[61898]: DEBUG oslo_vmware.api [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240792, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.887334] env[61898]: DEBUG nova.compute.manager [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-changed-8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 852.887558] env[61898]: DEBUG nova.compute.manager [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing instance network info cache due to event network-changed-8658c19e-7e0e-473b-a26d-7bb0da23b75f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 852.887867] env[61898]: DEBUG oslo_concurrency.lockutils [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.888038] env[61898]: DEBUG oslo_concurrency.lockutils [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.888184] env[61898]: DEBUG nova.network.neutron [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Refreshing network info cache for port 8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 852.939323] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4732ba37-9777-4993-9ad5-01a2a1532c2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.949972] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d27dfc-24c3-4825-8e00-c6cb19fb5e44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.984277] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4484b550-47e7-4b5f-8b17-b059246787d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.993303] env[61898]: DEBUG oslo_vmware.api [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240790, 'name': PowerOnVM_Task, 'duration_secs': 1.03971} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.995509] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.995749] env[61898]: DEBUG nova.compute.manager [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 852.996653] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eedd828-27fe-445c-89e7-a0e70f5ebdd0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.000154] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bbbc78-03d6-4424-a5bb-04aeda97b674 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.014520] env[61898]: DEBUG oslo_vmware.api [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240794, 'name': ReconfigVM_Task, 'duration_secs': 0.188201} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.024852] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6afadc12-7517-45aa-8b64-81394cf209a3 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Reconfigured VM instance to set the machine id {{(pid=61898) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 853.030017] env[61898]: DEBUG nova.compute.provider_tree [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 853.164287] env[61898]: DEBUG nova.compute.manager [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 853.164569] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.165020] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0baf524-09bd-4ab7-8b4a-539dc5718d9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.180799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb52e9f-8984-408c-b9d2-629fd6a1d395 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.191814] env[61898]: DEBUG oslo_vmware.api [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240793, 'name': PowerOnVM_Task, 'duration_secs': 0.520437} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.192333] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.192597] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.194269] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3a7399c-335d-498a-aef1-e8a1d2f562f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.196879] env[61898]: DEBUG nova.compute.manager [None req-e10a6ffa-d541-4810-a8fb-9838ee8c4fe7 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 853.197821] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f362e3fd-1add-4846-b023-4424d17dbc5a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.208513] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 853.208513] env[61898]: value = "task-1240795" [ 853.208513] env[61898]: _type = "Task" [ 853.208513] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.222704] env[61898]: WARNING nova.virt.vmwareapi.vmops [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4db53fdf-7107-43c5-a57c-65d54b807909 could not be found. [ 853.222883] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 853.223096] env[61898]: INFO nova.compute.manager [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Took 0.06 seconds to destroy the instance on the hypervisor. [ 853.223355] env[61898]: DEBUG oslo.service.loopingcall [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.223733] env[61898]: DEBUG nova.compute.manager [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 853.223837] env[61898]: DEBUG nova.network.neutron [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 853.231398] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240795, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.271976] env[61898]: DEBUG oslo_concurrency.lockutils [req-b7042960-bc3b-461b-9b45-22251eed9c8f req-92e5809c-8905-44b9-9c31-d87d50429136 service nova] Releasing lock "refresh_cache-d6c96dce-13ae-411a-b52a-fee484718a8a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.285918] env[61898]: DEBUG oslo_vmware.api [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240792, 'name': ReconfigVM_Task, 'duration_secs': 1.454886} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.286498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.286754] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfigured VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 853.291281] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 853.539249] env[61898]: DEBUG nova.scheduler.client.report [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 853.546839] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.632101] env[61898]: DEBUG nova.network.neutron [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updated VIF entry in instance network info cache for port 8658c19e-7e0e-473b-a26d-7bb0da23b75f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.632615] env[61898]: DEBUG nova.network.neutron [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "address": "fa:16:3e:63:cc:4a", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8658c19e-7e", "ovs_interfaceid": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.725282] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240795, 'name': PowerOffVM_Task, 'duration_secs': 0.419318} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.725644] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.726300] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578bf783-36a3-4f26-a974-120eee2412bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.746515] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e979e5d2-447d-4bdb-bc79-cc95d3be4c4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.795828] env[61898]: DEBUG oslo_concurrency.lockutils [None req-09612320-1313-489c-918d-68c15c68ce09 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-8658c19e-7e0e-473b-a26d-7bb0da23b75f" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.938s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.814744] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.990799] env[61898]: DEBUG nova.network.neutron [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.045387] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.047691] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 
tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.574s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.048565] env[61898]: DEBUG nova.objects.instance [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lazy-loading 'resources' on Instance uuid 5fc14058-7953-4e6a-a9ef-7933d61e9f3e {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.077866] env[61898]: INFO nova.scheduler.client.report [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted allocations for instance eda63357-6749-4652-914a-dc5b69163eb6 [ 854.079019] env[61898]: DEBUG nova.compute.manager [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 854.082195] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249854a2-03ef-43f3-a394-361d8804635f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.136133] env[61898]: DEBUG oslo_concurrency.lockutils [req-538420fe-b943-41b2-8b19-bb10b3aa6f81 req-7876d071-2238-4fe6-98e3-5dd895e6f3ab service nova] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.142273] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "d6c96dce-13ae-411a-b52a-fee484718a8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.143097] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.143397] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "d6c96dce-13ae-411a-b52a-fee484718a8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.143555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock 
"d6c96dce-13ae-411a-b52a-fee484718a8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.143791] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.150924] env[61898]: INFO nova.compute.manager [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Terminating instance [ 854.258181] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 854.258539] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9681877b-8c5f-4951-9120-17ba45726035 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.267233] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 854.267233] env[61898]: value = "task-1240796" [ 854.267233] env[61898]: _type = "Task" [ 854.267233] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.276596] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240796, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.493104] env[61898]: INFO nova.compute.manager [-] [instance: 4db53fdf-7107-43c5-a57c-65d54b807909] Took 1.27 seconds to deallocate network for instance. 
[ 854.588391] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e2c36ef0-a06d-4e06-befa-3709a3c008c5 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "eda63357-6749-4652-914a-dc5b69163eb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.794s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.594722] env[61898]: INFO nova.compute.manager [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] instance snapshotting [ 854.595368] env[61898]: DEBUG nova.objects.instance [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.658722] env[61898]: DEBUG nova.compute.manager [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 854.658722] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.658722] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354110df-b738-4901-a9e8-b2f1d387fb8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.671567] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.671843] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98a945dc-e8a2-4ddf-be96-ebabd0254e3e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.685632] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 854.685632] env[61898]: value = "task-1240797" [ 854.685632] env[61898]: _type = "Task" [ 854.685632] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.698510] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240797, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.780559] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240796, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.827799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276550bd-68be-47b8-8a8b-4150b7b82dee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.836711] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1cfe83b-6dc9-4a24-824e-38f34d9c4fe9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.871388] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06027fe-420c-4b0f-961a-2e0d5d73af80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.879996] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1355cd88-ce1f-4123-a3c1-ae5ec01a8e26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.899297] env[61898]: DEBUG nova.compute.provider_tree [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.992325] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.992620] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.102903] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761ec6cf-ae8d-4b26-a09e-d665f202522b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.122406] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4016d691-4f16-4d1e-a7ee-f8776d95bc9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.158571] env[61898]: DEBUG nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 
tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 855.159566] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5beb96-c911-4a64-b4bc-d080525afa40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.195853] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240797, 'name': PowerOffVM_Task, 'duration_secs': 0.25049} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.196651] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.196651] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.196651] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e19e9bab-f771-4029-ab4d-f347fb698018 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.276374] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.276768] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.276891] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Deleting the datastore file [datastore2] d6c96dce-13ae-411a-b52a-fee484718a8a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.277544] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75c8ca69-f3d7-4004-bcc0-03e3eaeca785 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.283982] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240796, 'name': 
CreateSnapshot_Task, 'duration_secs': 0.707131} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.285401] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 855.285795] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for the task: (returnval){ [ 855.285795] env[61898]: value = "task-1240799" [ 855.285795] env[61898]: _type = "Task" [ 855.285795] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.286547] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464f211d-4ae3-4d30-9767-96c1ad4c9c97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.304010] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.402725] env[61898]: DEBUG nova.scheduler.client.report [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 855.496045] env[61898]: INFO nova.compute.manager [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Detaching volume bcbf2abe-9a1d-4f11-b843-5d7210e16392 [ 855.519575] env[61898]: DEBUG oslo_concurrency.lockutils [None req-47dbce33-afdc-4960-b343-51e871e90bcf tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "4db53fdf-7107-43c5-a57c-65d54b807909" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.867s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.533167] env[61898]: INFO nova.virt.block_device [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Attempting to driver detach volume bcbf2abe-9a1d-4f11-b843-5d7210e16392 from mountpoint /dev/sdb [ 
855.533431] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Volume detach. Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 855.533618] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267635', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'name': 'volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1fb4535d-47d8-45c5-b6d6-d05e57237b98', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'serial': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 855.534925] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b4d803-da7f-4d2b-99cb-970aa12a9982 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.560132] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e131663-20ad-4689-91c0-f18f806c64c8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.570019] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24397f2f-315e-4271-9b82-e0a36242ef57 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.592818] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d58311-4f20-40f8-82e4-e9f55c63cdf2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.608102] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] The volume has not been displaced from its original location: [datastore1] volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392/volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 855.613514] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 855.613860] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da22a041-878f-458e-b643-10fc676586b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.636028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 855.636477] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 855.636477] env[61898]: value = "task-1240800" [ 855.636477] env[61898]: _type = "Task" [ 855.636477] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.636729] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1a1c7091-4aa4-4037-9893-9a751d745721 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.647627] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240800, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.649019] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 855.649019] env[61898]: value = "task-1240801" [ 855.649019] env[61898]: _type = "Task" [ 855.649019] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.657357] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240801, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.671704] env[61898]: INFO nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] instance snapshotting [ 855.671947] env[61898]: WARNING nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 855.674918] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2eccae-7aac-4b81-b630-284140a17d50 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.696266] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4371ec79-0cd5-4b5f-a23a-2e96b102c0fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.800597] env[61898]: DEBUG oslo_vmware.api [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Task: {'id': task-1240799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285064} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.801075] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.801187] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.801351] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.801541] env[61898]: INFO nova.compute.manager [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 855.801799] env[61898]: DEBUG oslo.service.loopingcall [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.802132] env[61898]: DEBUG nova.compute.manager [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 855.802132] env[61898]: DEBUG nova.network.neutron [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.811301] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 855.811734] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.811971] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.812302] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.812406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.812541] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.814210] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b4a5cef6-91a0-4bf0-abd9-287e8edb00ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.821144] env[61898]: INFO 
nova.compute.manager [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Terminating instance [ 855.840474] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 855.840474] env[61898]: value = "task-1240802" [ 855.840474] env[61898]: _type = "Task" [ 855.840474] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.853980] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240802, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.909201] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.912800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.717s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.913555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.913810] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 855.914191] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.010s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.914440] env[61898]: DEBUG nova.objects.instance [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lazy-loading 'resources' on Instance uuid 4c744673-0d9b-44ef-938f-372b101a2053 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.916419] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e614a9-a602-48d9-a3bc-798d1997eeb4 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.927126] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f48e8f9-c3db-4ae7-bd7e-42f49bf926bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.949581] env[61898]: INFO nova.scheduler.client.report [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted allocations for instance 5fc14058-7953-4e6a-a9ef-7933d61e9f3e [ 855.951498] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362590f7-2c1d-41e1-a75c-c7e63d47d0e2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.966297] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2251668c-aa9e-47be-940a-5cccef004faa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.006503] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180383MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 856.007132] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.069953] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "80931b22-a69b-41cd-b707-13bf11111b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.070195] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.150765] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240800, 'name': ReconfigVM_Task, 'duration_secs': 0.294681} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.154134] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 856.161651] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecdf8f55-7778-4e1f-933a-235d7ea7cca8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.179476] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240801, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.180995] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 856.180995] env[61898]: value = "task-1240803" [ 856.180995] env[61898]: _type = "Task" [ 856.180995] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.189779] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240803, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.208661] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 856.209063] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1cf3ea77-ad02-4451-97a7-a6c5a369ec7c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.217975] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 856.217975] env[61898]: value = "task-1240804" [ 856.217975] env[61898]: _type = "Task" [ 856.217975] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.227917] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240804, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.328100] env[61898]: DEBUG nova.compute.manager [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 856.328517] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 856.329961] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ff06ae-28d4-4dcc-9621-8e61a1004b56 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.343474] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 856.347311] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bee28efe-697c-495c-8551-29433a2ad3a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.357239] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240802, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.358576] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 856.358576] env[61898]: value = "task-1240805" [ 856.358576] env[61898]: _type = "Task" [ 856.358576] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.370905] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240805, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.469123] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a20ce80-c108-46b0-b9b0-e80200e97d77 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "5fc14058-7953-4e6a-a9ef-7933d61e9f3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.425s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.546481] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-bc699656-235b-4405-92f3-966811d6a509" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.546846] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-bc699656-235b-4405-92f3-966811d6a509" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.560231] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.560526] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 856.572675] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 856.666484] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240801, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.696561] env[61898]: DEBUG oslo_vmware.api [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240803, 'name': ReconfigVM_Task, 'duration_secs': 0.161681} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.697146] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267635', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'name': 'volume-bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1fb4535d-47d8-45c5-b6d6-d05e57237b98', 'attached_at': '', 'detached_at': '', 'volume_id': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392', 'serial': 'bcbf2abe-9a1d-4f11-b843-5d7210e16392'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 856.733201] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.735068] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182819c9-2185-4756-92ad-c47b904399d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.743889] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180fc9c8-2827-4dc5-a37e-7c058e736c8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.781436] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322adf4d-2fae-4881-ba2a-6657191de41b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.791147] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808939ef-7b36-42aa-8f63-ce930c82b3eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.807042] env[61898]: DEBUG nova.compute.provider_tree [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.853843] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240802, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.867666] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240805, 'name': PowerOffVM_Task, 'duration_secs': 0.383142} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.867952] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.868143] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.868399] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8280f07-95e9-43ec-b1f9-0feb18084ca2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.949113] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.949535] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.949815] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore1] 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.950106] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d17ee46-5eb0-4e8c-9d44-65f89d828fa1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.961223] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 856.961223] env[61898]: value = "task-1240807" [ 856.961223] env[61898]: _type = "Task" [ 856.961223] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.972260] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240807, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.055767] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.056078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.057014] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08605bd0-c1b4-4135-899c-df17d037b4e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.065777] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 857.090261] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ac8b47-1b1a-41f2-8578-0f4bec7dcd89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.098426] env[61898]: DEBUG nova.compute.manager [req-67616ea8-d2b6-44a0-8576-bd5f7900b50c req-576d70e3-fb26-4b93-82fa-95d966ff2927 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Received event network-vif-deleted-93f00603-54ee-451f-9579-32f82d4923b0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 857.098765] env[61898]: INFO nova.compute.manager [req-67616ea8-d2b6-44a0-8576-bd5f7900b50c req-576d70e3-fb26-4b93-82fa-95d966ff2927 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Neutron deleted interface 93f00603-54ee-451f-9579-32f82d4923b0; detaching it from the instance and deleting it from the info cache [ 857.099066] env[61898]: DEBUG nova.network.neutron [req-67616ea8-d2b6-44a0-8576-bd5f7900b50c req-576d70e3-fb26-4b93-82fa-95d966ff2927 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.133390] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfiguring VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 857.134543] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
857.135308] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d722f88c-ee45-49f3-a0ee-87d46b008f44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.149232] env[61898]: DEBUG nova.network.neutron [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.160123] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 857.160123] env[61898]: value = "task-1240808" [ 857.160123] env[61898]: _type = "Task" [ 857.160123] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.167924] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240801, 'name': CreateSnapshot_Task, 'duration_secs': 1.044734} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.168612] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 857.169357] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65deec3a-0533-41b9-8f89-c654f48bf289 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.175486] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.231162] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240804, 'name': CreateSnapshot_Task, 'duration_secs': 0.568638} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.231349] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 857.232476] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdccdba3-8aa1-4745-96b6-2c2b3b6737dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.246457] env[61898]: DEBUG nova.objects.instance [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.310029] env[61898]: DEBUG nova.scheduler.client.report [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 857.354281] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240802, 'name': CloneVM_Task, 'duration_secs': 1.170571} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.354586] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Created linked-clone VM from snapshot [ 857.355376] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d87f30-5b75-406b-8ac4-ecb1cc08c1dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.365057] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Uploading image 5d955d84-2f56-40e7-a5a1-0f6937a182cf {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 857.394921] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 857.394921] env[61898]: value = "vm-267661" [ 857.394921] env[61898]: _type = "VirtualMachine" [ 857.394921] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 857.395873] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-22f44d8c-adfe-4d82-b183-a7ad5bb6659c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.405707] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease: (returnval){ [ 857.405707] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228b3db-8652-2386-a67b-558029c7c57b" [ 857.405707] env[61898]: _type = "HttpNfcLease" [ 857.405707] env[61898]: } obtained for exporting VM: (result){ [ 857.405707] env[61898]: value = "vm-267661" [ 857.405707] env[61898]: _type = "VirtualMachine" [ 857.405707] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 857.406191] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the lease: (returnval){ [ 857.406191] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228b3db-8652-2386-a67b-558029c7c57b" [ 857.406191] env[61898]: _type = "HttpNfcLease" [ 857.406191] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 857.413207] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 857.413207] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228b3db-8652-2386-a67b-558029c7c57b" [ 857.413207] env[61898]: _type = "HttpNfcLease" [ 857.413207] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 857.471483] env[61898]: DEBUG oslo_vmware.api [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437678} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.471758] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.471954] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 857.472155] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 857.472347] env[61898]: INFO nova.compute.manager [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 857.473027] env[61898]: DEBUG oslo.service.loopingcall [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.473027] env[61898]: DEBUG nova.compute.manager [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 857.473027] env[61898]: DEBUG nova.network.neutron [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 857.524233] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.524510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.569503] env[61898]: INFO nova.compute.manager [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Rebuilding instance [ 857.605976] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c373cbaf-c9ca-4670-af05-8cf396879676 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.614300] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.622130] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.622928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.627585] env[61898]: DEBUG nova.compute.manager [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 
tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 857.630747] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f2b379-cb39-4026-aef2-8f39990faf46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.646026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3060b266-9258-411b-910d-917566f229b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.652151] env[61898]: INFO nova.compute.manager [-] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Took 1.85 seconds to deallocate network for instance. [ 857.657731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "cdd5f647-2c43-4389-820d-2d39d7d20889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.657966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.682299] env[61898]: DEBUG nova.compute.manager [req-67616ea8-d2b6-44a0-8576-bd5f7900b50c req-576d70e3-fb26-4b93-82fa-95d966ff2927 service nova] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Detach interface failed, port_id=93f00603-54ee-451f-9579-32f82d4923b0, reason: Instance d6c96dce-13ae-411a-b52a-fee484718a8a could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 857.691015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 857.695956] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3356d881-0e27-40cd-8d5c-5f6f5030c64e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.699479] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.708350] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 857.708350] env[61898]: value = "task-1240810" [ 857.708350] env[61898]: _type = "Task" [ 857.708350] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.717971] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240810, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.751510] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 857.753840] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-34f89c54-79f4-4f9e-9f40-771889aa2052 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.764423] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 857.764423] env[61898]: value = "task-1240811" [ 857.764423] env[61898]: _type = "Task" [ 857.764423] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.773704] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240811, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.815281] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.901s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.818429] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 4.271s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.818429] env[61898]: DEBUG nova.objects.instance [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 857.845797] env[61898]: INFO nova.scheduler.client.report [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Deleted allocations for instance 4c744673-0d9b-44ef-938f-372b101a2053 [ 857.914865] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 857.914865] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228b3db-8652-2386-a67b-558029c7c57b" [ 857.914865] env[61898]: _type = "HttpNfcLease" [ 857.914865] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 857.915855] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 857.915855] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5228b3db-8652-2386-a67b-558029c7c57b" [ 857.915855] env[61898]: _type = "HttpNfcLease" [ 857.915855] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 857.916138] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa4f915-ece9-42cb-b8dd-ae2ac45aced7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.924824] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk from lease info. 
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 857.925219] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 858.027621] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 858.068441] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-e6564a0e-6fb8-433f-a9e1-756f2892b25b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.129360] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 858.164142] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 858.165412] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.176921] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.213232] env[61898]: DEBUG oslo_concurrency.lockutils [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.222727] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240810, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.259893] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d5cbd3a-cdd2-4c1a-bb3c-a6a428315963 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.267s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.263021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.052s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.263021] env[61898]: DEBUG nova.compute.manager [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 858.263781] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd51ee4-d204-475e-94b0-6fb8e5fe8471 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.278382] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240811, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.281157] env[61898]: DEBUG nova.compute.manager [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 858.282405] env[61898]: DEBUG nova.objects.instance [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.355713] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77d26bb2-66b3-4e53-a821-794ca29945e9 tempest-FloatingIPsAssociationTestJSON-1458358645 tempest-FloatingIPsAssociationTestJSON-1458358645-project-member] Lock "4c744673-0d9b-44ef-938f-372b101a2053" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.914s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.556443] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.585212] env[61898]: DEBUG nova.network.neutron [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.653315] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.684022] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 858.684022] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ce859ba-f77d-412b-b34f-e4b67c822f1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.697773] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.697847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.700875] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 858.700875] env[61898]: value = "task-1240812" [ 858.700875] env[61898]: _type = "Task" [ 858.700875] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.712503] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.724039] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240810, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.780669] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240811, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.827703] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3761353-e801-48ad-a9a1-f15a5466565f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.829048] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.014s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.831916] env[61898]: INFO nova.compute.claims [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.088370] env[61898]: INFO nova.compute.manager [-] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Took 1.62 seconds to deallocate network for instance. 
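The records above show the oslo.vmware task pattern end to end: an asynchronous vSphere call (PowerOffVM_Task, CloneVM_Task, ReconfigVM_Task) returns a Task moref ("value = task-1240812"), the service logs "Waiting for the task ... to complete", and then periodic "_poll_task ... progress is N%" lines appear until the task finishes. A minimal sketch of the calling side, assuming oslo.vmware's public VMwareAPISession interface; the vCenter host and credentials below are placeholders, and vm-267664 is the VirtualMachine moref value that appears in the export-lease records further down:

    # Sketch only: drive an asynchronous vSphere task and poll it to completion,
    # the way the "Waiting for the task ..." / "progress is N%" records above are produced.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.test',        # placeholder vCenter host
        'user@example.test',      # placeholder username
        'secret',                 # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5,   # seconds between the logged progress polls
    )

    # Build a moref for the VM and invoke an asynchronous operation on it.
    vm_ref = vim_util.get_moref('vm-267664', 'VirtualMachine')
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() polls the task until it reaches a terminal state and
    # returns the completed TaskInfo (raising if the task errored out).
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)

The same polling loop is what emits the CloneVM_Task and ReconfigVM_Task progress records elsewhere in this capture.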
[ 859.136039] env[61898]: DEBUG nova.compute.manager [req-c9a1cd21-5b71-490b-9cb0-8b37779ca4ae req-fd69ccea-41d7-4068-bb2c-32aff207d8c9 service nova] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Received event network-vif-deleted-5650e9db-397e-427c-903b-85817fe18e52 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 859.188700] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.213239] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240812, 'name': PowerOffVM_Task, 'duration_secs': 0.231527} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.217617] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.218272] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.219820] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623445da-86df-435f-b07c-ccb3e1a5a9df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.232265] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240810, 'name': CloneVM_Task, 'duration_secs': 1.354821} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.234799] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created linked-clone VM from snapshot [ 859.235268] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 859.236108] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895d4eba-c492-4d11-9a49-b0b6ad9f0153 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.238806] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d27db42-fc71-45d6-8cdc-6296078e6410 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.246941] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploading image 7dc93144-107e-4f85-9947-21db0434e8b3 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.276095] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.276095] env[61898]: value = "vm-267664" [ 859.276095] env[61898]: _type = "VirtualMachine" [ 859.276095] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.276786] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c9a625b8-0934-4aac-b7c0-f07199e3e1aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.282404] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240811, 'name': CloneVM_Task, 'duration_secs': 1.234416} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.283201] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Created linked-clone VM from snapshot [ 859.284446] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b7fef24-b20d-4753-b070-aa6982739202 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.289025] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease: (returnval){ [ 859.289025] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dffa34-ba24-bab9-1ec0-392d135fee81" [ 859.289025] env[61898]: _type = "HttpNfcLease" [ 859.289025] env[61898]: } obtained for exporting VM: (result){ [ 859.289025] env[61898]: value = "vm-267664" [ 859.289025] env[61898]: _type = "VirtualMachine" [ 859.289025] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.289619] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the lease: (returnval){ [ 859.289619] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dffa34-ba24-bab9-1ec0-392d135fee81" [ 859.289619] env[61898]: _type = "HttpNfcLease" [ 859.289619] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.294113] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.294581] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Uploading image 65267edc-4683-4fb8-a756-527eb335a46b {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.301826] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61a0e04c-7e26-46ab-ad8a-916e2aca9a9f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.308698] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.308698] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dffa34-ba24-bab9-1ec0-392d135fee81" [ 859.308698] env[61898]: _type = "HttpNfcLease" [ 859.308698] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.313899] env[61898]: DEBUG oslo_vmware.api [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 859.313899] env[61898]: value = "task-1240815" [ 859.313899] env[61898]: _type = "Task" [ 859.313899] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.318244] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 859.319287] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 859.319287] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 859.325209] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a11b534-62a1-42f7-8398-130e3ba6e9b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.330070] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.330070] env[61898]: value = "vm-267665" [ 859.330070] env[61898]: _type = "VirtualMachine" [ 859.330070] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.330484] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6a1b9783-99d2-431b-ad10-85ce5bf21ba9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.342616] env[61898]: DEBUG oslo_vmware.api [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240815, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.349020] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 859.349020] env[61898]: value = "task-1240816" [ 859.349020] env[61898]: _type = "Task" [ 859.349020] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.349020] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease: (returnval){ [ 859.349020] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e99dfa-4bc1-25f8-97fa-72171a995d50" [ 859.349020] env[61898]: _type = "HttpNfcLease" [ 859.349020] env[61898]: } obtained for exporting VM: (result){ [ 859.349020] env[61898]: value = "vm-267665" [ 859.349020] env[61898]: _type = "VirtualMachine" [ 859.349020] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.349020] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the lease: (returnval){ [ 859.349020] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e99dfa-4bc1-25f8-97fa-72171a995d50" [ 859.349020] env[61898]: _type = "HttpNfcLease" [ 859.349020] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.361675] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240816, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.366120] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.366120] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e99dfa-4bc1-25f8-97fa-72171a995d50" [ 859.366120] env[61898]: _type = "HttpNfcLease" [ 859.366120] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 859.366120] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 859.366120] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e99dfa-4bc1-25f8-97fa-72171a995d50" [ 859.366120] env[61898]: _type = "HttpNfcLease" [ 859.366120] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 859.366120] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52a4591-43c8-475f-8dbc-a398f83ac34b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.375319] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk from lease info. 
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 859.375657] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.492218] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a77c7b58-ee8e-4fa7-a925-f6975c7b3da9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.608561] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.689296] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.802958] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.802958] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dffa34-ba24-bab9-1ec0-392d135fee81" [ 859.802958] env[61898]: _type = "HttpNfcLease" [ 859.802958] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 859.803694] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 859.803694] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dffa34-ba24-bab9-1ec0-392d135fee81" [ 859.803694] env[61898]: _type = "HttpNfcLease" [ 859.803694] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 859.804382] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e97502-edaf-4725-9af5-fb8a4cd2f602 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.813930] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk from lease info. 
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 859.814251] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.901032] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240816, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274288} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.904664] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 859.904954] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 859.905580] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 859.908638] env[61898]: DEBUG oslo_vmware.api [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240815, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.956385] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ac5b7a89-e9f4-4577-abe9-71f88fb3aa85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.188587] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.273633] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdc5bb4-aab3-446e-b00b-a5785c70734d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.284022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817755a3-eb35-490d-88a4-ce44c6863f08 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.324434] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfde9195-bba0-46a0-93ff-f363dc07bd67 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.336319] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f22ffe3-8cc6-44f0-bcb3-e4d62a18cc68 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.353431] env[61898]: DEBUG nova.compute.provider_tree [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.394119] env[61898]: DEBUG oslo_vmware.api [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240815, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.689364] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.856518] env[61898]: DEBUG nova.scheduler.client.report [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 860.893666] env[61898]: DEBUG oslo_vmware.api [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240815, 'name': PowerOffVM_Task, 'duration_secs': 1.206385} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.893889] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.894099] env[61898]: DEBUG nova.compute.manager [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 860.894921] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ced680-6320-4783-ac89-f15225abbc75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.189990] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.361418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.362038] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 861.364845] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 5.358s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.411450] env[61898]: DEBUG oslo_concurrency.lockutils [None req-05162a17-1f39-4b52-80c9-1bc8a7577850 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.150s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.691540] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.869446] env[61898]: DEBUG nova.compute.utils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 861.880457] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 861.880676] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 861.924693] env[61898]: DEBUG nova.policy [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 862.182725] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Successfully created port: 55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.196064] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.381849] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 862.420398] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1fb4535d-47d8-45c5-b6d6-d05e57237b98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421268] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 1aa03975-f18f-4e64-836e-e991b73ee9d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421268] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance aab10d8f-0d25-4351-a627-7222be63895e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421268] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 45b8dc91-b577-4548-bf3a-32c7c936c616 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421268] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 320577e5-f197-4f66-a94f-9b9ba2479325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421268] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance e851d73d-58f0-486a-a95c-70d07e5faad2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421496] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7c6aad92-6e91-48fc-89ae-5ee4c89f449c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.421531] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance d6c96dce-13ae-411a-b52a-fee484718a8a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 070bc0cc-ff77-48b8-bd08-f17fe69e25af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 86367a82-239b-4f6e-b306-d9661eadf95e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.422742] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 9b7b9962-fda1-46af-9ecc-ea5b352d5193 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance e5c38d18-18e4-47dc-8445-71d3dc0c325a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.422742] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 01685478-9d68-4edd-8dff-7d63fcd8bcd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 862.693438] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.925594] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 80931b22-a69b-41cd-b707-13bf11111b88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
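The per-instance allocation dicts walked through above line up with the "Final resource view" record reported a few lines below (used_ram=3008MB, used_disk=13GB, used_vcpus=13). A quick consistency check, assuming the tracker's usage figures are simply the reserved host memory from the inventory record plus the sum of these per-instance allocations, which is what the logged numbers suggest:

    # Rough consistency check against the log, not Nova code: thirteen instances
    # (the "total allocated vcpus: 13" count), each holding
    # {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, plus the 512 MB reserved host
    # memory from the MEMORY_MB inventory entry.
    instances = 13
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}
    reserved_host_memory_mb = 512

    used_ram_mb = reserved_host_memory_mb + instances * per_instance['MEMORY_MB']  # 3008
    used_disk_gb = instances * per_instance['DISK_GB']                             # 13
    used_vcpus = instances * per_instance['VCPU']                                  # 13
    print(used_ram_mb, used_disk_gb, used_vcpus)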
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 863.195496] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.398576] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 863.429140] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 5323b250-fad8-4d71-81ed-c5e5eeb8aeab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 863.697893] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.729311] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Successfully updated port: 55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.934124] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 9afa94d2-16a1-484f-96b4-8bbd93829ffe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.196451] env[61898]: DEBUG oslo_vmware.api [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240808, 'name': ReconfigVM_Task, 'duration_secs': 6.819403} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.196906] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.197196] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Reconfigured VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 864.233793] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.233911] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.234122] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 864.437316] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.770080] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.910653] env[61898]: DEBUG nova.network.neutron [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Updating instance_info_cache with network_info: [{"id": "55523e4c-177b-4148-9eee-b51554447d0d", "address": "fa:16:3e:80:c2:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55523e4c-17", "ovs_interfaceid": "55523e4c-177b-4148-9eee-b51554447d0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.941251] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance cdd5f647-2c43-4389-820d-2d39d7d20889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 864.941516] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 864.941693] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 865.224320] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f78e485-34b4-4636-9853-a29c8efbade6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.233010] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeedff4-1867-47ed-911a-b59ac7db2fd6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.267102] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96f9b8d-6335-4dea-b5a3-5b4270b20e8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.276082] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12daa9e1-5260-4508-9e4d-b96d842b305b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.292845] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.413933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.414892] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Instance network_info: |[{"id": "55523e4c-177b-4148-9eee-b51554447d0d", "address": "fa:16:3e:80:c2:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55523e4c-17", "ovs_interfaceid": "55523e4c-177b-4148-9eee-b51554447d0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 865.502864] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.503176] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.503469] env[61898]: DEBUG nova.network.neutron [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.751502] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.751828] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.752062] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.752308] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.752558] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.752777] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.753110] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.753355] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.753573] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.753788] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.754088] env[61898]: DEBUG nova.virt.hardware [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.756601] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0525a8ab-07b0-47c3-96fe-ad05154beb36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.768564] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.768946] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.769293] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.769429] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.769626] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.769835] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.770070] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.770240] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.770495] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.770756] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.770756] env[61898]: DEBUG nova.virt.hardware [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 
tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.772068] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdeff34-6539-41b8-9809-5b338b71e9eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.778542] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473461cb-3e34-4c00-836e-cf0f4d8a96f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.788934] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 865.797950] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e25f6ea-7a00-4d3c-ac03-aa9ed3413194 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.804351] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93408682-c795-41bc-a35f-2b167fd2e5be {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.806947] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 865.810439] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:5d:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddb06f4c-13ed-4322-b1e8-f4022b32e4f4', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.818483] env[61898]: DEBUG oslo.service.loopingcall [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.819090] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.819976] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5795165-5ba3-40de-bc74-c7d2afddf9bc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.850907] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 865.851134] env[61898]: ERROR oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk due to incomplete transfer. [ 865.852009] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:c2:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55523e4c-177b-4148-9eee-b51554447d0d', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.859328] env[61898]: DEBUG oslo.service.loopingcall [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.860579] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6ac45351-14bb-4e41-9216-74f46c2c6d37 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.862755] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.862964] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00b9e9ed-3d1d-4a49-94c9-1009c870b5cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.879574] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.879574] env[61898]: value = "task-1240818" [ 865.879574] env[61898]: _type = "Task" [ 865.879574] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.884897] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.884897] env[61898]: value = "task-1240819" [ 865.884897] env[61898]: _type = "Task" [ 865.884897] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.889517] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240818, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.889822] env[61898]: DEBUG oslo_vmware.rw_handles [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526a41cc-7b26-ceec-aa3a-ad12995c5a7c/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 865.890025] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Uploaded image 5d955d84-2f56-40e7-a5a1-0f6937a182cf to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 865.892190] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 865.895361] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-62832154-ae4f-4e95-b181-dc5903efea7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.903627] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240819, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.905376] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 865.905376] env[61898]: value = "task-1240820" [ 865.905376] env[61898]: _type = "Task" [ 865.905376] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.917028] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240820, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.320998] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 866.321274] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.956s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.322013] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.187s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.326564] env[61898]: INFO nova.compute.claims [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.403187] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240818, 'name': CreateVM_Task, 'duration_secs': 0.492709} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.407050] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.408903] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.409253] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.409667] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.414100] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ff5af5c-4c47-44ef-bc3d-7a15c5934479 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.420966] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240819, 'name': 
CreateVM_Task, 'duration_secs': 0.473627} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.425533] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 866.425993] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240820, 'name': Destroy_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.426528] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 866.426528] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e39369-e192-c7a6-a81d-36d738418cf6" [ 866.426528] env[61898]: _type = "Task" [ 866.426528] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.427195] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.437798] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e39369-e192-c7a6-a81d-36d738418cf6, 'name': SearchDatastore_Task, 'duration_secs': 0.011832} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.438345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.438639] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.438929] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.439530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.439530] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.440079] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.440358] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 866.440690] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ffe40b3-bcc4-4101-98f7-a90812914c4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.443231] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e5d9dda-6bc9-41a2-94fb-da2ce91fca1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.449075] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 
tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 866.449075] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cd9451-e453-d25a-c102-36a481de3e8b" [ 866.449075] env[61898]: _type = "Task" [ 866.449075] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.455410] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.455702] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.461655] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb38668-df9f-446f-824f-7dd0a4d90dac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.463200] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cd9451-e453-d25a-c102-36a481de3e8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.468332] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 866.468332] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ce7569-85cd-17f0-1feb-cc3ac7ab3628" [ 866.468332] env[61898]: _type = "Task" [ 866.468332] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.478277] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ce7569-85cd-17f0-1feb-cc3ac7ab3628, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.574191] env[61898]: INFO nova.network.neutron [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Port bc699656-235b-4405-92f3-966811d6a509 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
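[editorial note] The ReconfigVM_Task, CreateVM_Task, Destroy_Task and SearchDatastore_Task entries above all follow the same shape: `_poll_task` logs "progress is N%" until the task reaches a terminal state, then logs "completed successfully" with a `duration_secs`. The snippet below is a minimal, self-contained sketch of that polling loop for illustration only; `TaskInfo` and `poll_task` are hypothetical stand-ins, not oslo.vmware's real classes (the driver actually calls the session's task-wait helper against live vCenter task objects).

```python
import time

# Illustrative sketch of the polling pattern behind the
# "_poll_task ... progress is N%" / "completed successfully" log entries.
# TaskInfo and poll_task are hypothetical, not oslo.vmware internals.

class TaskInfo:
    def __init__(self, state, progress=0, error=None):
        self.state = state          # 'running', 'success' or 'error'
        self.progress = progress    # integer percentage reported by vCenter
        self.error = error

def poll_task(get_task_info, interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state."""
    start = time.time()
    while True:
        info = get_task_info()
        if info.state == 'running':
            print("Task progress is %d%%." % info.progress)
            time.sleep(interval)
            continue
        duration = time.time() - start
        if info.state == 'success':
            print("Task completed successfully (duration_secs=%.6f)." % duration)
            return info
        raise RuntimeError("Task failed: %s" % info.error)

# Usage: feed it a callable returning successive task snapshots.
_states = iter([TaskInfo('running', 5), TaskInfo('running', 99), TaskInfo('success', 100)])
poll_task(lambda: next(_states), interval=0)
```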
[ 866.590879] env[61898]: DEBUG nova.compute.manager [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Received event network-vif-plugged-55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 866.591181] env[61898]: DEBUG oslo_concurrency.lockutils [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] Acquiring lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.591433] env[61898]: DEBUG oslo_concurrency.lockutils [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.591614] env[61898]: DEBUG oslo_concurrency.lockutils [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.591961] env[61898]: DEBUG nova.compute.manager [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] No waiting events found dispatching network-vif-plugged-55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 866.592148] env[61898]: WARNING nova.compute.manager [req-67e5faa7-35c2-43ae-8a57-32c31d72aea9 req-303e2974-8720-42ec-93d9-311a48bc0b5d service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Received unexpected event network-vif-plugged-55523e4c-177b-4148-9eee-b51554447d0d for instance with vm_state building and task_state spawning. 
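[editorial note] The entry above shows the external-event path: Neutron reports network-vif-plugged for port 55523e4c-..., the compute manager takes the per-instance "-events" lock, finds no registered waiter, and logs the event as unexpected because the instance is still building/spawning. The sketch below only approximates that dispatch logic under a single in-process lock; `InstanceEventRegistry` is a hypothetical simplification, not Nova's actual InstanceEvents class.

```python
import threading

# Simplified sketch of per-instance external-event dispatch: an incoming event
# either wakes a registered waiter or is reported as unexpected.
# InstanceEventRegistry is illustrative only.

class InstanceEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> Event

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the action."""
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event; returns True if a waiter was woken."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print("Received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
            return False
        waiter.set()
        return True

registry = InstanceEventRegistry()
# No waiter registered yet, so the event is reported as unexpected,
# mirroring the WARNING in the log entry above.
registry.dispatch("01685478-9d68-4edd-8dff-7d63fcd8bcd3",
                  "network-vif-plugged-55523e4c-177b-4148-9eee-b51554447d0d")
```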
[ 866.803873] env[61898]: DEBUG nova.network.neutron [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "address": "fa:16:3e:63:cc:4a", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8658c19e-7e", "ovs_interfaceid": "8658c19e-7e0e-473b-a26d-7bb0da23b75f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.819243] env[61898]: DEBUG nova.compute.manager [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-deleted-8658c19e-7e0e-473b-a26d-7bb0da23b75f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 866.819243] env[61898]: INFO nova.compute.manager [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Neutron deleted interface 8658c19e-7e0e-473b-a26d-7bb0da23b75f; detaching it from the instance and deleting it from the info cache [ 866.819322] env[61898]: DEBUG 
nova.network.neutron [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.922891] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240820, 'name': Destroy_Task, 'duration_secs': 0.577602} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.923343] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Destroyed the VM [ 866.923678] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 866.924148] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2ea6f815-a456-44be-9431-f8fcb50fb8d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.933861] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 866.933861] env[61898]: value = "task-1240821" [ 866.933861] env[61898]: _type = "Task" [ 866.933861] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.947982] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240821, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.963742] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cd9451-e453-d25a-c102-36a481de3e8b, 'name': SearchDatastore_Task, 'duration_secs': 0.014249} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.964769] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.965264] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.965744] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.982622] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ce7569-85cd-17f0-1feb-cc3ac7ab3628, 'name': SearchDatastore_Task, 'duration_secs': 0.012179} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.983922] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eb686d4-d3c3-4831-94f2-9e625ca172d3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.992256] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 866.992256] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52684ab1-a7e9-9898-39ab-2a90449f783c" [ 866.992256] env[61898]: _type = "Task" [ 866.992256] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.005766] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52684ab1-a7e9-9898-39ab-2a90449f783c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.164132] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 867.165373] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0d17de-4b5d-4cae-b046-d855af882e03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.175219] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 867.175219] env[61898]: ERROR oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk due to incomplete transfer. [ 867.175219] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0defcfa3-d111-4a16-b9a3-d423adc37f03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.184513] env[61898]: DEBUG oslo_vmware.rw_handles [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5282666e-43d0-73ce-0434-670a148a1ffc/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 867.184817] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Uploaded image 65267edc-4683-4fb8-a756-527eb335a46b to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 867.186807] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 867.187207] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-404a37d5-b7d4-48d6-b302-424f23631df4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.194929] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 867.194929] env[61898]: value = "task-1240822" [ 867.194929] env[61898]: _type = "Task" [ 867.194929] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.204976] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240822, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.215523] env[61898]: DEBUG nova.objects.instance [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.290584] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.291147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.291597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 867.292046] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.292432] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.294787] env[61898]: INFO nova.compute.manager [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Terminating instance [ 867.306672] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.322412] env[61898]: DEBUG oslo_concurrency.lockutils [req-621c053b-3c48-419c-ab4e-ed95774d4699 
req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] Acquiring lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.453154] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240821, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.505374] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52684ab1-a7e9-9898-39ab-2a90449f783c, 'name': SearchDatastore_Task, 'duration_secs': 0.011083} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.505923] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.506310] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.506651] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.506892] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.507187] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c33125a-c9b2-483e-a053-6304051f65da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.513409] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb6071bd-a671-4009-86c4-2260df70a747 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.523568] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 
867.523568] env[61898]: value = "task-1240823" [ 867.523568] env[61898]: _type = "Task" [ 867.523568] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.531709] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.531990] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.533532] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c526c0a-61c3-435e-9a41-15ee3ee11c18 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.540138] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.544991] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 867.544991] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521e13ba-76d0-e47f-9d52-7a1448532263" [ 867.544991] env[61898]: _type = "Task" [ 867.544991] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.560267] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521e13ba-76d0-e47f-9d52-7a1448532263, 'name': SearchDatastore_Task, 'duration_secs': 0.011957} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.561227] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f7a923-255c-44b7-a8b3-2767312a99fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.572067] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 867.572067] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2f19c-4e8a-2e7e-dc12-61ef809c7fdd" [ 867.572067] env[61898]: _type = "Task" [ 867.572067] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.589151] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2f19c-4e8a-2e7e-dc12-61ef809c7fdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.689214] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b671b7-7e83-4004-9db4-8e690bc40101 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.704030] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1cb139-09d4-48dc-b475-175a4d1e471b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.713555] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240822, 'name': Destroy_Task, 'duration_secs': 0.361329} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.738937] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Destroyed the VM [ 867.739282] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 867.742258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.742472] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.742651] env[61898]: DEBUG nova.network.neutron [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.742851] env[61898]: DEBUG nova.objects.instance [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'info_cache' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.745072] 
env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ed37d00f-81e2-4cda-bd9e-017a370c0dc7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.747718] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13543cf1-c6e8-422d-bf54-a8a98645ddce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.758168] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b91964-0f54-4139-a2dc-493b521fd417 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.765370] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 867.765370] env[61898]: value = "task-1240824" [ 867.765370] env[61898]: _type = "Task" [ 867.765370] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.780939] env[61898]: DEBUG nova.compute.provider_tree [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.790966] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240824, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.799827] env[61898]: DEBUG nova.compute.manager [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 867.801239] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.801411] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618d0e74-7448-49f8-a969-e7201855399f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.813511] env[61898]: DEBUG oslo_concurrency.lockutils [None req-df7b5a2b-8794-463f-8337-ad0b4fa24af7 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-e851d73d-58f0-486a-a95c-70d07e5faad2-bc699656-235b-4405-92f3-966811d6a509" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.267s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.814877] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 867.818708] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6c9493e-cccc-4f33-b010-223fa0206dc4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.828436] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 867.828436] env[61898]: value = "task-1240825" [ 867.828436] env[61898]: _type = "Task" [ 867.828436] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.842954] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240825, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.952660] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240821, 'name': RemoveSnapshot_Task, 'duration_secs': 0.829696} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.952660] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 867.952660] env[61898]: DEBUG nova.compute.manager [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 867.952660] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b74309-d15a-42a4-8b3c-6b085a95b17f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.038755] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48314} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.039109] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.043687] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.043687] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f97eb9d-420a-40dc-bc67-b14c41b612e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.056121] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 868.056121] env[61898]: value = "task-1240826" [ 868.056121] env[61898]: _type = "Task" [ 868.056121] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.069017] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240826, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.082586] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2f19c-4e8a-2e7e-dc12-61ef809c7fdd, 'name': SearchDatastore_Task, 'duration_secs': 0.01159} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.083238] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 868.083417] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 01685478-9d68-4edd-8dff-7d63fcd8bcd3/01685478-9d68-4edd-8dff-7d63fcd8bcd3.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.085015] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d79ab860-099c-4ae8-a0fa-dee7c1d23c5c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.093639] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 868.093639] env[61898]: value = "task-1240827" [ 868.093639] env[61898]: _type = "Task" [ 868.093639] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.111769] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240827, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.252831] env[61898]: DEBUG nova.objects.base [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Object Instance<1fb4535d-47d8-45c5-b6d6-d05e57237b98> lazy-loaded attributes: flavor,info_cache {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 868.278854] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240824, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.284444] env[61898]: DEBUG nova.scheduler.client.report [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 868.342241] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240825, 'name': PowerOffVM_Task, 'duration_secs': 0.227466} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.342841] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.343502] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.343948] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1668b7ce-9cc2-4963-9732-c0537f011cab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.471026] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.471026] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.471026] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleting the datastore file [datastore1] e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.471026] env[61898]: INFO nova.compute.manager [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Shelve offloading [ 868.472018] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d63144b3-c68a-4469-ab77-19c25e3ae6ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.485571] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 868.485571] env[61898]: value = "task-1240829" [ 868.485571] env[61898]: _type = "Task" [ 868.485571] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.498572] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.567960] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081318} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.568554] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.569586] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61572dce-c607-41e6-9dff-ee3474228981 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.599423] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.599973] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fe48261-9854-4bb8-8db4-05bb389cdf12 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.628994] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240827, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472899} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.630511] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 01685478-9d68-4edd-8dff-7d63fcd8bcd3/01685478-9d68-4edd-8dff-7d63fcd8bcd3.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.630790] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.631152] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 868.631152] env[61898]: value = "task-1240830" [ 868.631152] env[61898]: _type = "Task" [ 868.631152] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.631362] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a5f8518-b8d3-4bc7-95b2-d35ca96df2cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.644528] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240830, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.646606] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 868.646606] env[61898]: value = "task-1240831" [ 868.646606] env[61898]: _type = "Task" [ 868.646606] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.656862] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240831, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.688422] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.688759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.688968] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.689193] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.689376] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.691853] env[61898]: INFO nova.compute.manager [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Terminating instance [ 868.778514] env[61898]: DEBUG oslo_vmware.api [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240824, 'name': RemoveSnapshot_Task, 'duration_secs': 0.611289} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.778886] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 868.779125] env[61898]: INFO nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Took 13.10 seconds to snapshot the instance on the hypervisor. [ 868.790109] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.790576] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 868.793649] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.180s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.797020] env[61898]: INFO nova.compute.claims [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.976882] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.977216] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5df6d165-6a10-4fe0-875a-7b54b04e7eab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.985288] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 868.985288] env[61898]: value = "task-1240832" [ 868.985288] env[61898]: _type = "Task" [ 868.985288] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.005330] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 869.005932] env[61898]: DEBUG nova.compute.manager [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 869.005932] env[61898]: DEBUG oslo_vmware.api [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249233} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.006619] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366bd65a-f36e-4201-b9ea-85fea3932d87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.009465] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.009673] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.009853] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.010049] env[61898]: INFO nova.compute.manager [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Took 1.21 seconds to destroy the instance on the hypervisor. [ 869.010298] env[61898]: DEBUG oslo.service.loopingcall [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.010673] env[61898]: DEBUG nova.compute.manager [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 869.010812] env[61898]: DEBUG nova.network.neutron [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.016784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.016949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.017138] env[61898]: DEBUG nova.network.neutron [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.057268] env[61898]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 8658c19e-7e0e-473b-a26d-7bb0da23b75f could not be found.", "detail": ""}} {{(pid=61898) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 869.057539] env[61898]: DEBUG nova.network.neutron [-] Unable to show port 8658c19e-7e0e-473b-a26d-7bb0da23b75f as it no longer exists. {{(pid=61898) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 869.143652] env[61898]: DEBUG nova.compute.manager [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Received event network-changed-55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 869.143851] env[61898]: DEBUG nova.compute.manager [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Refreshing instance network info cache due to event network-changed-55523e4c-177b-4148-9eee-b51554447d0d. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 869.146020] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] Acquiring lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.146020] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] Acquired lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.146020] env[61898]: DEBUG nova.network.neutron [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Refreshing network info cache for port 55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.152921] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240830, 'name': ReconfigVM_Task, 'duration_secs': 0.29016} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.157870] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.162332] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0970eae-5ea6-445f-9ed6-5a36b1f05b40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.179570] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 869.179570] env[61898]: value = "task-1240833" [ 869.179570] env[61898]: _type = "Task" [ 869.179570] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.179570] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071822} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.179870] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.186759] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085c0e1f-8857-4c10-bc84-99e3ebdaad17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.203347] env[61898]: DEBUG nova.compute.manager [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 869.203506] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.204435] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240833, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.228107] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e195fbb9-a7f5-4840-92ad-db8ef029f535 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.244119] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 01685478-9d68-4edd-8dff-7d63fcd8bcd3/01685478-9d68-4edd-8dff-7d63fcd8bcd3.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.245358] env[61898]: DEBUG nova.network.neutron [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [{"id": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "address": "fa:16:3e:4c:bb:e6", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9c5b33c-50", "ovs_interfaceid": "a9c5b33c-5075-4ced-8700-0ca1e0071262", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.247062] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-224254b3-ac18-4705-911e-199192b39046 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.265032] env[61898]: DEBUG oslo_concurrency.lockutils [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "refresh_cache-1fb4535d-47d8-45c5-b6d6-d05e57237b98" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.275481] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.276396] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87a21305-5470-449e-b203-f176604d0dac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.278907] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 869.278907] env[61898]: value = "task-1240834" [ 869.278907] env[61898]: _type = "Task" [ 869.278907] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.283826] env[61898]: DEBUG nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance disappeared during snapshot {{(pid=61898) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4580}} [ 869.295672] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240834, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.299647] env[61898]: DEBUG nova.compute.utils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 869.302432] env[61898]: DEBUG nova.compute.manager [None req-9099fb68-f001-44e6-98bd-16921d28643a tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image not found during clean up 65267edc-4683-4fb8-a756-527eb335a46b {{(pid=61898) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4586}} [ 869.306379] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 869.306656] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.351332] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.351610] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.351853] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore2] 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.352162] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e600432f-92c7-4653-a15a-76aec7029213 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.361928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.362249] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.363819] env[61898]: DEBUG oslo_vmware.api [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 869.363819] env[61898]: value = "task-1240836" [ 869.363819] env[61898]: _type = "Task" [ 869.363819] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.376580] env[61898]: DEBUG oslo_vmware.api [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.421167] env[61898]: DEBUG nova.policy [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2586563437fc4ab0a4b2802d4d01fe5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a984459656494b738b60ec791c579316', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 869.694022] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240833, 'name': Rename_Task, 'duration_secs': 0.162007} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.694022] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.694022] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c61ca584-e992-464d-87eb-241c74319363 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.701091] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 869.701091] env[61898]: value = "task-1240837" [ 869.701091] env[61898]: _type = "Task" [ 869.701091] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.711811] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240837, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.776574] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.776884] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.789484] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240834, 'name': ReconfigVM_Task, 'duration_secs': 0.378299} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.790355] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 01685478-9d68-4edd-8dff-7d63fcd8bcd3/01685478-9d68-4edd-8dff-7d63fcd8bcd3.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.791079] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60143bf3-90e5-4759-afa6-f89c443e00e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.800095] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 869.800095] env[61898]: value = "task-1240838" [ 869.800095] env[61898]: _type = "Task" [ 869.800095] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.815650] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 869.818517] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240838, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.873142] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 869.877485] env[61898]: DEBUG nova.compute.manager [req-0828978d-6f49-4cd7-80c9-4e843ead9b8a req-7535e2c7-d9f4-4b6f-bf33-d3c3b47b9984 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-deleted-8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 869.877716] env[61898]: INFO nova.compute.manager [req-0828978d-6f49-4cd7-80c9-4e843ead9b8a req-7535e2c7-d9f4-4b6f-bf33-d3c3b47b9984 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Neutron deleted interface 8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2; detaching it from the instance and deleting it from the info cache [ 869.878346] env[61898]: DEBUG nova.network.neutron [req-0828978d-6f49-4cd7-80c9-4e843ead9b8a req-7535e2c7-d9f4-4b6f-bf33-d3c3b47b9984 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "bc699656-235b-4405-92f3-966811d6a509", "address": "fa:16:3e:67:08:7d", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc699656-23", "ovs_interfaceid": "bc699656-235b-4405-92f3-966811d6a509", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.895325] env[61898]: DEBUG oslo_vmware.api [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159344} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.895325] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.895557] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.895692] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.896494] env[61898]: INFO nova.compute.manager [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Took 0.69 seconds to destroy the instance on the hypervisor. [ 869.896494] env[61898]: DEBUG oslo.service.loopingcall [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.902372] env[61898]: DEBUG nova.compute.manager [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 869.902372] env[61898]: DEBUG nova.network.neutron [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.220820] env[61898]: DEBUG nova.network.neutron [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.226394] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240837, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.239905] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef938e2c-81f3-40aa-816c-76464e9cd93e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.252232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02473759-acc5-4ed9-9fdb-a3c21f2b2ae3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.289846] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.290967] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 870.297496] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dcbf0b6-22bf-4fcc-b4da-d6557568ebca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.299447] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ca62fc-d68d-4277-8709-0454ea5baced {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.311443] env[61898]: DEBUG oslo_vmware.api [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 870.311443] env[61898]: value = "task-1240839" [ 870.311443] env[61898]: _type = "Task" [ 870.311443] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.320147] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c84bc1-39ad-423b-870e-913aebd8364e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.323718] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240838, 'name': Rename_Task, 'duration_secs': 0.191725} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.327668] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.328903] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dba71ec4-89f4-4835-881f-ef275772f9e4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.334724] env[61898]: DEBUG oslo_vmware.api [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240839, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.343896] env[61898]: DEBUG nova.compute.provider_tree [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.348399] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 870.348399] env[61898]: value = "task-1240840" [ 870.348399] env[61898]: _type = "Task" [ 870.348399] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.358184] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240840, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.365618] env[61898]: DEBUG nova.network.neutron [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.386505] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93ddf2a9-6a9e-41e2-87ab-eef3e94286c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.399640] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e79e099-de20-41d5-ba56-01c47ba2a2cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.411505] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Successfully created port: 53551414-df45-4670-abea-be494090dd14 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 870.414309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.440142] env[61898]: DEBUG nova.compute.manager [req-0828978d-6f49-4cd7-80c9-4e843ead9b8a req-7535e2c7-d9f4-4b6f-bf33-d3c3b47b9984 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Detach interface failed, port_id=8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2, reason: Instance e851d73d-58f0-486a-a95c-70d07e5faad2 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 870.441284] env[61898]: DEBUG nova.network.neutron [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Updated VIF entry in instance network info cache for port 55523e4c-177b-4148-9eee-b51554447d0d. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 870.442567] env[61898]: DEBUG nova.network.neutron [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Updating instance_info_cache with network_info: [{"id": "55523e4c-177b-4148-9eee-b51554447d0d", "address": "fa:16:3e:80:c2:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55523e4c-17", "ovs_interfaceid": "55523e4c-177b-4148-9eee-b51554447d0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.685608] env[61898]: DEBUG nova.network.neutron [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.713803] env[61898]: DEBUG oslo_vmware.api [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240837, 'name': PowerOnVM_Task, 'duration_secs': 0.524502} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.714194] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.714508] env[61898]: DEBUG nova.compute.manager [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 870.715747] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3493a499-d0fb-4e8b-98b8-f4418808ed77 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.728092] env[61898]: INFO nova.compute.manager [-] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Took 1.72 seconds to deallocate network for instance. [ 870.821110] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.826119] env[61898]: DEBUG oslo_vmware.api [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240839, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.829476] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 870.850056] env[61898]: DEBUG nova.scheduler.client.report [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 870.867391] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240840, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.870564] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 870.870961] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 870.871284] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.871584] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 870.871802] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.872083] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 870.872376] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 870.872645] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 870.872905] env[61898]: DEBUG nova.virt.hardware [None 
req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 870.873132] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 870.873411] env[61898]: DEBUG nova.virt.hardware [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 870.875536] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.877316] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ad4c0c-408e-4eb1-a213-543eb6e16e50 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.890215] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670425f4-8c2a-4644-8220-343f3a329aae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.944845] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] Releasing lock "refresh_cache-01685478-9d68-4edd-8dff-7d63fcd8bcd3" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.945252] env[61898]: DEBUG nova.compute.manager [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Received event network-vif-deleted-bc699656-235b-4405-92f3-966811d6a509 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 870.945451] env[61898]: INFO nova.compute.manager [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Neutron deleted interface bc699656-235b-4405-92f3-966811d6a509; detaching it from the instance and deleting it from the info cache [ 870.945905] env[61898]: DEBUG nova.network.neutron [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Updating instance_info_cache with network_info: [{"id": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "address": "fa:16:3e:40:36:b4", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8eab7c47-4a", "ovs_interfaceid": "8eab7c47-4aa7-4c31-b77b-9fac0ec5d8d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.192476] env[61898]: INFO nova.compute.manager [-] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Took 1.29 seconds to deallocate network for instance. [ 871.239023] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.239023] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.323946] env[61898]: DEBUG oslo_vmware.api [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240839, 'name': PowerOnVM_Task, 'duration_secs': 0.627957} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.324264] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.324510] env[61898]: DEBUG nova.compute.manager [None req-dfa1f48e-ab24-485c-8b17-5e41f18ca69f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 871.325409] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d02ad63-d17d-4f8f-a0e6-a397aa9b7adf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.361994] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.361994] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 871.367974] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.203s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.368214] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.370296] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.815s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.371718] env[61898]: INFO nova.compute.claims [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.377025] env[61898]: DEBUG oslo_vmware.api [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240840, 'name': PowerOnVM_Task, 'duration_secs': 0.589874} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.377025] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.377025] env[61898]: INFO nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Took 7.98 seconds to spawn the instance on the hypervisor. 
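The wait_for_task / _poll_task entries around this point show the driver handing a vCenter task reference (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) to oslo.vmware and polling it until it reports success. A minimal sketch of that polling pattern follows; it uses a caller-supplied get_task_info() callable as a stand-in and is not the actual oslo.vmware session API.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        get_task_info is a hypothetical callable returning an object with
        .state in {'queued', 'running', 'success', 'error'}, .progress
        (0-100) and .error.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info                   # logged as "completed successfully"
            if info.state == 'error':
                raise TaskFailed(info.error)  # surfaced to the compute manager as a fault
            # queued/running: report progress and poll again, like _poll_task above
            print("Task %s progress is %s%%" % (task_ref, info.progress))
            time.sleep(interval)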
[ 871.377025] env[61898]: DEBUG nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 871.377025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22436596-9dfa-4c00-9f6d-41d5ba6b63b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.403678] env[61898]: INFO nova.scheduler.client.report [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Deleted allocations for instance d6c96dce-13ae-411a-b52a-fee484718a8a [ 871.449403] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-669833db-8713-493c-a224-623931f99575 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.460799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde0fd42-bd9b-4045-b818-2f9e077dace2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.496565] env[61898]: DEBUG nova.compute.manager [req-b9218a6f-5583-4d49-b446-ce851ea47ef3 req-746633c0-7d6d-41a6-8c3c-974ac5a21571 service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Detach interface failed, port_id=bc699656-235b-4405-92f3-966811d6a509, reason: Instance e851d73d-58f0-486a-a95c-70d07e5faad2 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 871.541100] env[61898]: DEBUG nova.compute.manager [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-vif-unplugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 871.541373] env[61898]: DEBUG oslo_concurrency.lockutils [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.541595] env[61898]: DEBUG oslo_concurrency.lockutils [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.541768] env[61898]: DEBUG oslo_concurrency.lockutils [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.541961] env[61898]: DEBUG nova.compute.manager [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c 
req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] No waiting events found dispatching network-vif-unplugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.542170] env[61898]: WARNING nova.compute.manager [req-cbd539d0-3f06-42a4-8c6b-0d2fb221d43c req-3e9fc61d-aba3-4fba-863d-b478b47f2184 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received unexpected event network-vif-unplugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb for instance with vm_state shelved and task_state shelving_offloading. [ 871.551928] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 871.552819] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66290d48-a25a-425d-a41c-1bf727fa1df0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.560947] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 871.561248] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3c097a0-ebe3-4854-b7fe-9d6f71276d7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.638276] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 871.638488] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 871.638680] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 871.638964] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c77b586f-9ecc-4710-81c3-2da3f05d5cb8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.646919] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 871.646919] env[61898]: 
value = "task-1240842" [ 871.646919] env[61898]: _type = "Task" [ 871.646919] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.655682] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240842, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.701428] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.884027] env[61898]: DEBUG nova.compute.utils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 871.884027] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 871.884027] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.896197] env[61898]: INFO nova.compute.manager [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Took 18.10 seconds to build instance. 
[ 871.912295] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78fdb287-a701-442b-8af2-d0f1066e73f8 tempest-AttachInterfacesUnderV243Test-470425024 tempest-AttachInterfacesUnderV243Test-470425024-project-member] Lock "d6c96dce-13ae-411a-b52a-fee484718a8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.768s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.974182] env[61898]: DEBUG nova.policy [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a910d0cdf3cd4b17af818abd25a38b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ce0562f486e44cc877c1cc31525a13a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 871.978441] env[61898]: DEBUG nova.compute.manager [req-023bb924-9026-44b2-908d-4fe95920e5fe req-7f04aeee-6f89-4159-85de-e714a3d64683 service nova] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Received event network-vif-deleted-b1aac51c-a20e-43a4-94eb-1aaf57b59f76 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 872.158498] env[61898]: DEBUG oslo_vmware.api [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240842, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.437344} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.158719] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 872.158907] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 872.159096] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 872.185921] env[61898]: INFO nova.scheduler.client.report [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted allocations for instance 070bc0cc-ff77-48b8-bd08-f17fe69e25af [ 872.388310] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 872.398086] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cb851c6c-b994-4280-be1a-c9f829537477 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.613s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.653089] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4142b7ee-15c2-48e7-9b43-f6fcdd8fce24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.662064] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00e23b1-c5dd-43e5-932f-a1bc233dece7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.697329] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.698935] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d83883-8587-48ae-8b93-b9e49ccb2f22 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.709144] 
env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed453992-8b1d-4fa0-8ad3-495071aa0a24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.727844] env[61898]: DEBUG nova.compute.provider_tree [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.953394] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Successfully created port: dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.166482] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Successfully updated port: 53551414-df45-4670-abea-be494090dd14 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.231243] env[61898]: DEBUG nova.scheduler.client.report [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 873.399619] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 873.427835] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.428175] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.428346] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.428562] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.430588] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.434021] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.434021] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.434021] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 873.434021] env[61898]: DEBUG 
nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.434021] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.434021] env[61898]: DEBUG nova.virt.hardware [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.434663] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f979569e-b03a-4c7f-bdb2-3e82e4f5809d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.449485] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5ba43b-8392-4041-9453-fa17028857d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.672031] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.672031] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.672031] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.738481] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.738777] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 873.744566] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.090s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.746588] env[61898]: INFO nova.compute.claims [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.770158] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 873.770399] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing instance network info cache due to event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 873.770599] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.770768] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.770935] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.892802] env[61898]: INFO nova.compute.manager [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Rebuilding instance [ 873.941896] env[61898]: DEBUG nova.compute.manager [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 873.942895] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78a62c6-1cc5-40b8-a9fe-3573ba593521 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.044054] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.044378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.044652] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.044876] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.045086] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.047608] env[61898]: INFO nova.compute.manager [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Terminating instance [ 874.236972] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.252410] env[61898]: DEBUG nova.compute.utils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 874.256398] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 874.256574] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 874.408526] env[61898]: DEBUG nova.policy [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 874.551813] env[61898]: DEBUG nova.compute.manager [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 874.552172] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 874.553115] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed14bef3-002c-4f5f-8231-93b8235fc2d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.562238] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.562606] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72504d3b-adf1-476d-a20a-1543e6267a91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.570375] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 874.570375] env[61898]: value = "task-1240843" [ 874.570375] env[61898]: _type = "Task" [ 874.570375] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.579546] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240843, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.651237] env[61898]: DEBUG nova.network.neutron [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updating instance_info_cache with network_info: [{"id": "53551414-df45-4670-abea-be494090dd14", "address": "fa:16:3e:1f:b4:46", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53551414-df", "ovs_interfaceid": "53551414-df45-4670-abea-be494090dd14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.673623] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updated VIF entry in instance network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.674197] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcd3bd232-22", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.759444] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 874.958016] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 874.958363] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9bad444-0f92-47fe-a2d6-eb5fc135fb10 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.968426] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 874.968426] env[61898]: value = "task-1240844" [ 874.968426] env[61898]: _type = "Task" [ 874.968426] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.983854] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.052171] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 875.053752] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe14e2f-da3b-4876-8106-9f0cb4c1edfb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.065195] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 875.067016] env[61898]: ERROR oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk due to incomplete transfer. [ 875.067016] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4daa0c6c-e060-4d4a-80f9-814b27a88d70 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.078153] env[61898]: DEBUG oslo_vmware.rw_handles [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fb0d0a-0864-62a1-e0d6-29a9be4ce249/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 875.078612] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploaded image 7dc93144-107e-4f85-9947-21db0434e8b3 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 875.082023] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 875.084119] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a41c0300-fa39-42ed-95df-303fb4f79534 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.089701] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240843, 'name': PowerOffVM_Task, 'duration_secs': 0.244902} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.090052] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.090288] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.090557] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdcba3da-dd07-417a-99e7-842b8e2869e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.095316] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 875.095316] env[61898]: value = "task-1240845" [ 875.095316] env[61898]: _type = "Task" [ 875.095316] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.107374] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240845, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.152138] env[61898]: DEBUG nova.compute.manager [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Received event network-vif-plugged-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 875.152138] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.152138] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.152138] env[61898]: DEBUG oslo_concurrency.lockutils [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.152138] env[61898]: DEBUG nova.compute.manager [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] No waiting events found dispatching network-vif-plugged-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.152138] env[61898]: WARNING nova.compute.manager [req-3a414880-1655-4a03-99b5-dc7da3facdd2 req-20d0ee12-2a80-4782-a1c3-c74bab26637c service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Received unexpected event network-vif-plugged-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a for instance with vm_state building and task_state spawning. 
[ 875.154648] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.155027] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Instance network_info: |[{"id": "53551414-df45-4670-abea-be494090dd14", "address": "fa:16:3e:1f:b4:46", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53551414-df", "ovs_interfaceid": "53551414-df45-4670-abea-be494090dd14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 875.156038] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:b4:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53551414-df45-4670-abea-be494090dd14', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.169891] env[61898]: DEBUG oslo.service.loopingcall [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.171526] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Successfully created port: ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 875.178498] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.178843] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.179080] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.179297] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore1] 01685478-9d68-4edd-8dff-7d63fcd8bcd3 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.180238] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.180736] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Received event network-vif-plugged-53551414-df45-4670-abea-be494090dd14 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 875.180736] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Acquiring lock "80931b22-a69b-41cd-b707-13bf11111b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.180880] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Lock "80931b22-a69b-41cd-b707-13bf11111b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.181046] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Lock "80931b22-a69b-41cd-b707-13bf11111b88-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.181253] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] No waiting events found dispatching network-vif-plugged-53551414-df45-4670-abea-be494090dd14 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.181429] env[61898]: WARNING nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Received unexpected event network-vif-plugged-53551414-df45-4670-abea-be494090dd14 for instance with vm_state building and task_state spawning. [ 875.181750] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Received event network-changed-53551414-df45-4670-abea-be494090dd14 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 875.181812] env[61898]: DEBUG nova.compute.manager [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Refreshing instance network info cache due to event network-changed-53551414-df45-4670-abea-be494090dd14. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 875.182033] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Acquiring lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.182200] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Acquired lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.182369] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Refreshing network info cache for port 53551414-df45-4670-abea-be494090dd14 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.184298] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae01d98e-6a54-4355-bcf6-0173e7643433 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.188473] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fdffcab-ed04-4879-8863-71468da7b16f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.205433] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9181ea1b-5679-46d2-bb7e-404543618c27 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.217534] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071d1756-0b58-4f6f-8eb6-3d8dbfba660a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.220393] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 875.220393] env[61898]: value = "task-1240847" [ 875.220393] env[61898]: _type = "Task" [ 875.220393] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.221921] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.221921] env[61898]: value = "task-1240848" [ 875.221921] env[61898]: _type = "Task" [ 875.221921] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.256841] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da5d638-ef4c-415f-98b6-ef1f8d16602b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.263403] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240847, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.273101] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.273101] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240848, 'name': CreateVM_Task} progress is 15%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.277709] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f098c96a-f660-4e67-a835-ec2e914bbc7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.293780] env[61898]: DEBUG nova.compute.provider_tree [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.481180] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240844, 'name': PowerOffVM_Task, 'duration_secs': 0.229633} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.481559] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 875.481821] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 875.482702] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154fe428-cf2b-44a9-a62e-b5e6906ad1f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.491435] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 875.491753] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6633063-e5af-4417-a65f-e58c1674e1f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.606823] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240845, 'name': Destroy_Task, 'duration_secs': 0.392701} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.607136] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroyed the VM [ 875.607379] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 875.607655] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f5fd48cf-5987-425f-8e77-b9a4179224a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.615258] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 875.615258] env[61898]: value = "task-1240850" [ 875.615258] env[61898]: _type = "Task" [ 875.615258] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.621145] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 875.621719] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 875.622037] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore1] 1aa03975-f18f-4e64-836e-e991b73ee9d5 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 875.627379] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92b178dc-57f5-4d54-9e68-b851c008e78f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.629469] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240850, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.635413] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 875.635413] env[61898]: value = "task-1240851" [ 875.635413] env[61898]: _type = "Task" [ 875.635413] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.644797] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.682747] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Successfully updated port: dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.734815] env[61898]: DEBUG oslo_vmware.api [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240847, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169423} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.738323] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.738569] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 875.738791] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 875.739025] env[61898]: INFO nova.compute.manager [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Took 1.19 seconds to destroy the instance on the hypervisor. [ 875.739432] env[61898]: DEBUG oslo.service.loopingcall [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 875.740087] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240848, 'name': CreateVM_Task, 'duration_secs': 0.384697} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.740305] env[61898]: DEBUG nova.compute.manager [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 875.740501] env[61898]: DEBUG nova.network.neutron [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 875.742078] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.742712] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.742879] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.743310] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.743866] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2defee3-1502-4d90-8cce-ffc7e95500cb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.749724] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 875.749724] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ebc746-10ca-9722-5438-2d8c5cb41fbe" [ 875.749724] env[61898]: _type = "Task" [ 875.749724] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.761755] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ebc746-10ca-9722-5438-2d8c5cb41fbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.777797] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 875.799952] env[61898]: DEBUG nova.scheduler.client.report [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 875.827650] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 875.827650] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 875.827650] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.827650] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 875.827894] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.827933] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 875.828148] env[61898]: DEBUG nova.virt.hardware 
[None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 875.828314] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 875.828481] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 875.828689] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 875.828881] env[61898]: DEBUG nova.virt.hardware [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 875.829818] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bc4e91-0c2a-4a65-ae51-14a72aeeedc6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.843220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b12fac-1d62-428c-a4e5-f288f76611a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.081522] env[61898]: DEBUG nova.compute.manager [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Received event network-changed-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 876.081522] env[61898]: DEBUG nova.compute.manager [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Refreshing instance network info cache due to event network-changed-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 876.081522] env[61898]: DEBUG oslo_concurrency.lockutils [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] Acquiring lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.081522] env[61898]: DEBUG oslo_concurrency.lockutils [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] Acquired lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.081522] env[61898]: DEBUG nova.network.neutron [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Refreshing network info cache for port dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 876.085712] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updated VIF entry in instance network info cache for port 53551414-df45-4670-abea-be494090dd14. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.085712] env[61898]: DEBUG nova.network.neutron [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updating instance_info_cache with network_info: [{"id": "53551414-df45-4670-abea-be494090dd14", "address": "fa:16:3e:1f:b4:46", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53551414-df", "ovs_interfaceid": "53551414-df45-4670-abea-be494090dd14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.126772] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240850, 'name': RemoveSnapshot_Task} progress is 65%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.145849] env[61898]: DEBUG nova.compute.manager [req-b42aa07f-9367-4c14-8e08-8dd13b7b4827 req-db526926-d5d1-4b6f-a7da-415e257bab24 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Received event network-vif-deleted-55523e4c-177b-4148-9eee-b51554447d0d {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 876.145849] env[61898]: INFO nova.compute.manager [req-b42aa07f-9367-4c14-8e08-8dd13b7b4827 req-db526926-d5d1-4b6f-a7da-415e257bab24 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Neutron deleted interface 55523e4c-177b-4148-9eee-b51554447d0d; detaching it from the instance and deleting it from the info cache [ 876.146080] env[61898]: DEBUG nova.network.neutron [req-b42aa07f-9367-4c14-8e08-8dd13b7b4827 req-db526926-d5d1-4b6f-a7da-415e257bab24 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.151567] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161386} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.152288] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 876.152288] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 876.152491] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 876.187883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.261022] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ebc746-10ca-9722-5438-2d8c5cb41fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.011046} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.261368] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.261614] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.261909] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.262079] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.262268] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.262542] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ba48ca1-58d2-4997-9efc-156ce1f6f140 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.275021] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.275021] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.275021] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-886b1919-fe2a-48aa-8570-1e22afb677f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.281141] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 876.281141] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e6c4ae-b503-6086-b14d-30a939804836" [ 876.281141] env[61898]: _type = "Task" [ 876.281141] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.290982] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e6c4ae-b503-6086-b14d-30a939804836, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.302966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.303528] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 876.306298] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.609s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.307765] env[61898]: INFO nova.compute.claims [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.515245] env[61898]: DEBUG nova.network.neutron [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.588404] env[61898]: DEBUG oslo_concurrency.lockutils [req-2b60eeb7-f538-4f96-879f-90c8a15bb3f5 req-770f1d93-d484-48f8-9e55-188b12fdcb92 service nova] Releasing lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.624778] env[61898]: DEBUG nova.network.neutron [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.630446] env[61898]: DEBUG oslo_vmware.api [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240850, 'name': RemoveSnapshot_Task, 'duration_secs': 0.571456} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.630721] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 876.630973] env[61898]: INFO nova.compute.manager [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 21.53 seconds to snapshot the instance on the hypervisor. 
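The RemoveSnapshot_Task and SearchDatastore_Task entries above show the driver's wait_for_task cycle: a vCenter call returns a task handle, and the caller keeps polling it ("progress is 65%") until it reports "completed successfully" together with duration_secs. The Python sketch below only illustrates that generic polling pattern; poll_task_state and the fields it returns are hypothetical stand-ins, not the actual oslo_vmware.api code.

import time


class TaskTimeout(Exception):
    """Raised when a vCenter-style task does not finish within the deadline."""


def wait_for_task(poll_task_state, task_id, interval=0.5, timeout=300):
    """Poll a long-running task until it succeeds, mirroring the
    'progress is N%' ... 'completed successfully' cycle in the log.

    poll_task_state is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 65} or {'state': 'success'}.
    """
    start = time.monotonic()
    while True:
        info = poll_task_state(task_id)
        if info['state'] == 'success':
            return time.monotonic() - start          # the duration_secs seen in the log
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        if time.monotonic() - start > timeout:
            raise TaskTimeout(f"task {task_id} still {info.get('progress', 0)}% done")
        time.sleep(interval)                          # back off before the next poll

Nova itself relies on the library's own wait_for_task rather than a loop like this; the sketch only makes the logged progress/duration cycle concrete.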
[ 876.653536] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b101d70f-028e-40d0-baa9-e388d677ff43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.674888] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3aaabb4-1acc-4b66-8866-d71b2a9c480c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.714468] env[61898]: DEBUG nova.compute.manager [req-b42aa07f-9367-4c14-8e08-8dd13b7b4827 req-db526926-d5d1-4b6f-a7da-415e257bab24 service nova] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Detach interface failed, port_id=55523e4c-177b-4148-9eee-b51554447d0d, reason: Instance 01685478-9d68-4edd-8dff-7d63fcd8bcd3 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 876.792828] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e6c4ae-b503-6086-b14d-30a939804836, 'name': SearchDatastore_Task, 'duration_secs': 0.041559} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.793010] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f11d77-7949-46d9-b8bd-de1713795001 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.795904] env[61898]: DEBUG nova.network.neutron [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.800759] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 876.800759] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522771b6-0f4f-2b3f-03ac-6e2b5d28e2eb" [ 876.800759] env[61898]: _type = "Task" [ 876.800759] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.809260] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522771b6-0f4f-2b3f-03ac-6e2b5d28e2eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.811677] env[61898]: DEBUG nova.compute.utils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 876.815721] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 876.815955] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.914322] env[61898]: DEBUG nova.policy [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64f22a09c344e468e74742efbd05cff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a476b83a7bda4078b4690a73adfea8c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 877.018208] env[61898]: INFO nova.compute.manager [-] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Took 1.28 seconds to deallocate network for instance. 
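The paired 'Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... "released" ... held N s' lines around compute_resources and the refresh_cache-<uuid> names above come from oslo.concurrency. The following is a minimal sketch of serialising such a critical section with the public lockutils.lock context manager; the cache-refresh body is a hypothetical placeholder, not Nova's implementation.

from oslo_concurrency import lockutils

INSTANCE = '5323b250-fad8-4d71-81ed-c5e5eeb8aeab'
PORT = 'dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a'


def refresh_network_info_cache(instance_uuid, port_id):
    # Hypothetical placeholder for the cache refresh the log performs
    # under the per-instance lock.
    print(f'refreshing network info for {instance_uuid} after {PORT} change')


# The context manager serialises concurrent refreshes of the same instance;
# with debug logging enabled, oslo.concurrency emits the "Acquiring lock" /
# "Acquired lock" / "Releasing lock" lines seen for refresh_cache-<uuid> above.
with lockutils.lock(f'refresh_cache-{INSTANCE}'):
    refresh_network_info_cache(INSTANCE, PORT)

Internal locks like these are in-process semaphores keyed by name, which is why the log shows each request waiting for, holding, and then releasing the same named lock in turn.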
[ 877.178134] env[61898]: DEBUG nova.compute.manager [None req-52c5985f-428e-456d-86aa-d90c513f0816 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Found 2 images (rotation: 2) {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 877.193891] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.194153] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.194317] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.194712] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.194712] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.194813] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.194990] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.195164] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 
tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.195335] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.195501] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.195689] env[61898]: DEBUG nova.virt.hardware [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.197394] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d41c626-3897-4ee8-8eec-9e530ce04ef9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.206950] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bc0782-f2ba-4b75-bd63-c8f30bf9242c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.221509] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:5d:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ddb06f4c-13ed-4322-b1e8-f4022b32e4f4', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.229321] env[61898]: DEBUG oslo.service.loopingcall [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.229599] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.229827] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abd1f1cf-0b2d-42f0-be5a-448130e09174 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.251532] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.251532] env[61898]: value = "task-1240852" [ 877.251532] env[61898]: _type = "Task" [ 877.251532] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.262083] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240852, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.298670] env[61898]: DEBUG oslo_concurrency.lockutils [req-1a5825a4-20ad-486b-8329-90e5a7b81a09 req-de314c56-0d03-4fe6-8c21-458227c85237 service nova] Releasing lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.299183] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.299349] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.314978] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522771b6-0f4f-2b3f-03ac-6e2b5d28e2eb, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.315452] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 877.318258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.318533] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/80931b22-a69b-41cd-b707-13bf11111b88.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.322440] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfcba176-537b-42ff-a4e2-d694028d7152 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.332166] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 877.332166] env[61898]: value = "task-1240853" [ 877.332166] env[61898]: _type = "Task" [ 877.332166] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.346327] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240853, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.530883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.697988] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-259c203b-a51d-4d43-a503-28e4d7c3778e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.708716] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93f1100-366b-4747-aa01-875387e485fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.743835] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160efaba-8c46-4cc3-9136-d9f7336e50d0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.759275] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea81085-a9de-4483-bef3-5cfef015386c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.779907] env[61898]: DEBUG nova.compute.provider_tree [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.786252] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240852, 'name': CreateVM_Task, 'duration_secs': 0.332765} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.786727] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.787754] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.787933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.788298] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.788576] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-441e14f2-482f-4d41-916b-02e012d826ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.797839] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 877.797839] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bcd996-ea69-8c51-f3ca-d29c9bbea261" [ 877.797839] env[61898]: _type = "Task" [ 877.797839] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.814683] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bcd996-ea69-8c51-f3ca-d29c9bbea261, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.857832] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240853, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.859617] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 877.910538] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Successfully updated port: ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 877.973262] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Successfully created port: df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 878.128825] env[61898]: DEBUG nova.network.neutron [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [{"id": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "address": "fa:16:3e:08:d8:91", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3e4cf3-8b", "ovs_interfaceid": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.288902] env[61898]: DEBUG nova.scheduler.client.report [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 878.312378] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bcd996-ea69-8c51-f3ca-d29c9bbea261, 'name': SearchDatastore_Task, 'duration_secs': 0.021561} completed 
successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.313789] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.315454] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.315454] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.315454] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.315454] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.315454] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d55d3da-497b-4dc3-8b9c-8fdec9bd167a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.328357] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 878.330559] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.330859] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.331773] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b4ff425-01f6-4b3f-808c-81d9374b1853 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.342395] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 878.342395] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523b37e6-4f48-8e63-5a39-57500b40c833" [ 878.342395] env[61898]: _type = "Task" [ 878.342395] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.346079] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240853, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744163} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.350331] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/80931b22-a69b-41cd-b707-13bf11111b88.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.350610] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.351300] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a036aca-418b-4e7c-8394-864f4237ea6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.361781] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523b37e6-4f48-8e63-5a39-57500b40c833, 'name': SearchDatastore_Task, 'duration_secs': 0.01075} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.365467] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 878.365806] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 878.365892] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.366054] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 878.366215] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.366371] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 878.366627] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 878.366797] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 878.367235] env[61898]: DEBUG 
nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 878.367964] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 878.367964] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 878.368911] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 878.368911] env[61898]: value = "task-1240854" [ 878.368911] env[61898]: _type = "Task" [ 878.368911] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.369802] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26901889-81cb-4cf4-8fb7-3e436bb8ac08 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.374116] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d27db6-eeb3-4178-b76d-d20000bffc59 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.389261] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240854, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.394405] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 878.394405] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52725cfc-3c96-23a5-e8b7-99e0a8489b28" [ 878.394405] env[61898]: _type = "Task" [ 878.394405] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.398176] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e77070-26f9-4173-83fc-5487a9dbcc99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.437591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.437591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.437591] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 878.437591] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52725cfc-3c96-23a5-e8b7-99e0a8489b28, 'name': SearchDatastore_Task, 'duration_secs': 0.017347} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.438263] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.438542] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.440685] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c70caa6-06f0-4416-baef-17882723fbea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.451042] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 878.451042] env[61898]: value = "task-1240855" [ 878.451042] env[61898]: _type = "Task" [ 878.451042] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.462137] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240855, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.519863] env[61898]: DEBUG nova.compute.manager [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Received event network-vif-plugged-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 878.520142] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Acquiring lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.520442] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.520694] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.520977] env[61898]: DEBUG nova.compute.manager [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] No waiting events found dispatching network-vif-plugged-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 878.521148] env[61898]: WARNING nova.compute.manager [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Received unexpected event network-vif-plugged-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f for instance with vm_state building and task_state spawning. [ 878.521658] env[61898]: DEBUG nova.compute.manager [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Received event network-changed-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 878.521658] env[61898]: DEBUG nova.compute.manager [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Refreshing instance network info cache due to event network-changed-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 878.521761] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Acquiring lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.632285] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.632285] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance network_info: |[{"id": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "address": "fa:16:3e:08:d8:91", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3e4cf3-8b", "ovs_interfaceid": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 878.632285] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d8:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 878.640363] env[61898]: DEBUG oslo.service.loopingcall [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 878.640684] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 878.640927] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3776dc68-de70-4fd2-b75a-fd34b543e1c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.665717] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 878.665717] env[61898]: value = "task-1240856" [ 878.665717] env[61898]: _type = "Task" [ 878.665717] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.676657] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240856, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.796954] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.796954] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 878.799069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.194s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.799528] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.802096] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.388s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.804493] env[61898]: INFO nova.compute.claims [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.849255] env[61898]: INFO nova.scheduler.client.report [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocations for instance 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d [ 878.893047] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240854, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146312} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.893236] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.894186] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438ca17a-e6ee-4113-9667-810d1f49c850 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.924858] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/80931b22-a69b-41cd-b707-13bf11111b88.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.926306] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a011b34c-7a80-497c-9ad8-1b26c0b8d165 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.944471] env[61898]: DEBUG nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 878.945965] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe1bcd4-4e3a-44b1-9d33-7bc46a93ce07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.951579] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 878.951579] env[61898]: value = "task-1240857" [ 878.951579] env[61898]: _type = "Task" [ 878.951579] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.973244] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240857, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.973539] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493916} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.973791] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.974028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.974457] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6d31bfa-bdf4-467f-b6ce-2ab28678bb7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.979027] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.983480] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 878.983480] env[61898]: value = "task-1240858" [ 878.983480] env[61898]: _type = "Task" [ 878.983480] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.995959] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240858, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.142821] env[61898]: DEBUG nova.network.neutron [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Updating instance_info_cache with network_info: [{"id": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "address": "fa:16:3e:ce:df:77", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8a171f-ba", "ovs_interfaceid": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.176802] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240856, 'name': CreateVM_Task, 'duration_secs': 0.451911} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.176998] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 879.177857] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.177857] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.179027] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 879.179027] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f918cf-959a-4560-a730-9c3f23280b88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.184291] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 879.184291] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523ba668-3a5c-ed8a-bc43-8ceaf519b564" [ 879.184291] env[61898]: _type = "Task" [ 879.184291] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.193598] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523ba668-3a5c-ed8a-bc43-8ceaf519b564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.310653] env[61898]: DEBUG nova.compute.utils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 879.315349] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 879.315552] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.359469] env[61898]: DEBUG oslo_concurrency.lockutils [None req-abbed1ae-7c7a-4be4-8172-750801ce717f tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.547s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.363212] env[61898]: DEBUG nova.policy [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f64f22a09c344e468e74742efbd05cff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a476b83a7bda4078b4690a73adfea8c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 879.466271] env[61898]: INFO nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] instance snapshotting [ 879.467555] env[61898]: DEBUG nova.objects.instance [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.472958] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240857, 'name': ReconfigVM_Task, 'duration_secs': 0.373488} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.472958] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/80931b22-a69b-41cd-b707-13bf11111b88.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.473530] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6433ddc8-a461-4a04-8508-2f502c07d982 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.483563] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 879.483563] env[61898]: value = "task-1240859" [ 879.483563] env[61898]: _type = "Task" [ 879.483563] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.498739] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097731} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.502380] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.502723] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240859, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.503776] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757352ba-2b3f-4749-831b-6324e9c72d05 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.532669] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.533015] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7966ea55-245d-48ed-a877-ae0d657b71c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.559751] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 879.559751] env[61898]: value = "task-1240860" [ 879.559751] env[61898]: _type = "Task" [ 879.559751] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.569568] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240860, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.648026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.648026] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Instance network_info: |[{"id": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "address": "fa:16:3e:ce:df:77", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8a171f-ba", "ovs_interfaceid": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 879.648026] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Acquired lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.648026] env[61898]: DEBUG nova.network.neutron [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Refreshing network info cache for port ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.648026] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:df:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.658882] env[61898]: DEBUG oslo.service.loopingcall [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.659830] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 879.660212] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ae80f23-fbb3-4bfb-bde1-5d382bfbe57d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.677679] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Successfully created port: d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.690727] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 879.690727] env[61898]: value = "task-1240861" [ 879.690727] env[61898]: _type = "Task" [ 879.690727] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.700999] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523ba668-3a5c-ed8a-bc43-8ceaf519b564, 'name': SearchDatastore_Task, 'duration_secs': 0.010622} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.701955] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.702372] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 879.702761] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.703050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.703375] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 879.706833] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c3ca69a-a0ff-4d52-b517-2918973a9ac6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.708781] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240861, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.717542] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 879.718011] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 879.718872] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2e66c4c-94ab-4e5e-bb49-e580a2775037 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.729023] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 879.729023] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522d1f07-d2fe-6d53-bb79-d87b9b1a231b" [ 879.729023] env[61898]: _type = "Task" [ 879.729023] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.735618] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522d1f07-d2fe-6d53-bb79-d87b9b1a231b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.820641] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 879.977174] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3d9ce5-2516-431b-9ed5-a6ee10e689ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.005868] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c308f6c-13c4-4489-96df-372d20e7bf5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.019150] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240859, 'name': Rename_Task, 'duration_secs': 0.164159} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.028796] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.034247] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07694576-2676-42e6-9303-7686d125c4f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.037019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.037019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.044952] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 880.044952] env[61898]: value = "task-1240862" [ 880.044952] env[61898]: _type = "Task" [ 880.044952] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.057659] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.070942] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240860, 'name': ReconfigVM_Task, 'duration_secs': 0.301545} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.078505] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 1aa03975-f18f-4e64-836e-e991b73ee9d5/1aa03975-f18f-4e64-836e-e991b73ee9d5.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.078505] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d9846e3-3224-470c-baa8-500f6ab93f00 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.086363] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 880.086363] env[61898]: value = "task-1240863" [ 880.086363] env[61898]: _type = "Task" [ 880.086363] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.087434] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Successfully updated port: df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 880.105432] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240863, 'name': Rename_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.205383] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240861, 'name': CreateVM_Task, 'duration_secs': 0.376584} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.205603] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 880.206588] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.206781] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.207142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.207403] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dac5be0-f638-487d-b749-77236f526855 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.216393] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 880.216393] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52fb97e8-ac8b-26f8-54d9-1cc60fc72dc9" [ 880.216393] env[61898]: _type = "Task" [ 880.216393] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.231214] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fb97e8-ac8b-26f8-54d9-1cc60fc72dc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.243086] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522d1f07-d2fe-6d53-bb79-d87b9b1a231b, 'name': SearchDatastore_Task, 'duration_secs': 0.013297} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.244170] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b8fecc-0cea-4ed3-b99f-67074029f051 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.255036] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 880.255036] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5215ce77-7f7c-709b-acb5-27cd3cdfa7a7" [ 880.255036] env[61898]: _type = "Task" [ 880.255036] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.263520] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5215ce77-7f7c-709b-acb5-27cd3cdfa7a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.285371] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9478519e-cc53-4096-801b-748699200c61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.294051] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfa8439-7787-4530-acbf-0898cab6670c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.332742] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9439cbe-f3ea-4082-b519-c1f446114588 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.342027] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d4f3f7-9947-4e76-9b9e-6b22ba39eeaa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.358048] env[61898]: DEBUG nova.compute.provider_tree [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.536758] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 880.537197] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9f02caae-aa44-4111-a565-1a33b47c9550 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.540655] env[61898]: DEBUG nova.compute.manager [None 
req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 880.558350] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 880.558350] env[61898]: value = "task-1240864" [ 880.558350] env[61898]: _type = "Task" [ 880.558350] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.575720] env[61898]: DEBUG oslo_vmware.api [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240862, 'name': PowerOnVM_Task, 'duration_secs': 0.508659} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.577192] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.577596] env[61898]: INFO nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Took 9.75 seconds to spawn the instance on the hypervisor. [ 880.578382] env[61898]: DEBUG nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 880.579658] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e5c2f8-6efa-476e-9427-8b31d2c83d21 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.591427] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240864, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.595668] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.595868] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.596076] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.618637] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240863, 'name': Rename_Task, 'duration_secs': 0.203568} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.620080] env[61898]: DEBUG nova.network.neutron [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Updated VIF entry in instance network info cache for port ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 880.620536] env[61898]: DEBUG nova.network.neutron [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Updating instance_info_cache with network_info: [{"id": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "address": "fa:16:3e:ce:df:77", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac8a171f-ba", "ovs_interfaceid": "ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.622129] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.622945] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12393d6b-7b62-408e-b739-0bfde00e7e06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.632559] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 880.632559] env[61898]: value = "task-1240865" [ 880.632559] env[61898]: _type = "Task" [ 880.632559] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.649641] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240865, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.652133] env[61898]: DEBUG nova.compute.manager [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Received event network-vif-plugged-df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 880.652276] env[61898]: DEBUG oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Acquiring lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.652504] env[61898]: DEBUG oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.652684] env[61898]: DEBUG oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.652871] env[61898]: DEBUG nova.compute.manager [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] No waiting events found dispatching network-vif-plugged-df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 880.653068] env[61898]: WARNING nova.compute.manager [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Received unexpected event network-vif-plugged-df9c8d59-f506-4a95-b90b-85b338619b4a for instance with vm_state building and task_state spawning. [ 880.653244] env[61898]: DEBUG nova.compute.manager [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Received event network-changed-df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 880.653417] env[61898]: DEBUG nova.compute.manager [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Refreshing instance network info cache due to event network-changed-df9c8d59-f506-4a95-b90b-85b338619b4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 880.653590] env[61898]: DEBUG oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Acquiring lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.732573] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fb97e8-ac8b-26f8-54d9-1cc60fc72dc9, 'name': SearchDatastore_Task, 'duration_secs': 0.010612} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.733318] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.733318] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 880.733586] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.766031] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5215ce77-7f7c-709b-acb5-27cd3cdfa7a7, 'name': SearchDatastore_Task, 'duration_secs': 0.010867} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.766201] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.769018] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 880.769018] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.769018] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 880.769018] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c9dd1c4-835d-4f94-a87e-9749b3de2c63 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.769689] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48a92627-828e-4dc3-97b2-f12e733fa3a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.778363] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 880.778363] env[61898]: value = "task-1240866" [ 880.778363] env[61898]: _type = "Task" [ 880.778363] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.783158] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 880.783490] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 880.785025] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fda0e878-70f9-4f4a-a5a4-1a8775f9d175 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.791125] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240866, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.795220] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 880.795220] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9bec5-0af5-66c2-fb20-dc8ca4f27bd0" [ 880.795220] env[61898]: _type = "Task" [ 880.795220] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.803924] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9bec5-0af5-66c2-fb20-dc8ca4f27bd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.837526] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 880.862040] env[61898]: DEBUG nova.scheduler.client.report [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.868877] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.868877] env[61898]: DEBUG 
nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.869792] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.870040] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.870236] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.870424] env[61898]: DEBUG nova.virt.hardware [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.871414] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0300e7a1-b19b-412b-9168-42c6a4b435e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.881894] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147b2875-2ddf-4ccb-8a5a-19148d06ae4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.066450] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.071653] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240864, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.126410] env[61898]: DEBUG oslo_concurrency.lockutils [req-bc9d8adc-ad1b-484a-8e48-14fd72cb220f req-4fb59850-e091-4770-9d45-810326572062 service nova] Releasing lock "refresh_cache-9afa94d2-16a1-484f-96b4-8bbd93829ffe" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.130272] env[61898]: INFO nova.compute.manager [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Took 24.04 seconds to build instance. [ 881.148229] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240865, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.162152] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.291769] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240866, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469307} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.292248] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.292520] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.292875] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-838c560e-1435-45a2-ab5f-e15515d9a544 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.302460] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 881.302460] env[61898]: value = "task-1240867" [ 881.302460] env[61898]: _type = "Task" [ 881.302460] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.313309] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9bec5-0af5-66c2-fb20-dc8ca4f27bd0, 'name': SearchDatastore_Task, 'duration_secs': 0.009321} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.314781] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e78526c2-45ca-4dd6-9763-a7eacee259dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.320154] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240867, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.323638] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 881.323638] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523089b9-e8a9-7551-ea58-d0146dffa48b" [ 881.323638] env[61898]: _type = "Task" [ 881.323638] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.333370] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523089b9-e8a9-7551-ea58-d0146dffa48b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.376847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.377455] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 881.380784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.560s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.382190] env[61898]: INFO nova.compute.claims [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.437074] env[61898]: DEBUG nova.compute.manager [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Received event network-vif-plugged-d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 881.437303] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] Acquiring lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.437517] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.437907] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.437907] env[61898]: DEBUG nova.compute.manager [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] No waiting events found dispatching network-vif-plugged-d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 881.438055] env[61898]: WARNING nova.compute.manager [req-e9137894-9806-4205-b178-77a819b19a46 req-1f75db90-afda-40bc-b57a-63edd277748f service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Received unexpected event network-vif-plugged-d3fda812-e082-4563-9ca3-516f9e0b6e27 for instance with vm_state building and task_state spawning. 
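Annotation (not part of the log): the entries in this section follow one recurring pattern — the VMware driver submits a vCenter task (CreateSnapshot_Task, PowerOnVM_Task, CopyVirtualDisk_Task, Rename_Task, ...) and then polls it, logging "Waiting for the task", the progress percentage, and finally "completed successfully" with a duration. The following is a minimal stand-alone sketch of that polling loop, not the oslo.vmware implementation; the get_task_info callable, its return shape, and the 0.5 s interval are assumptions made for illustration only.

    # Illustrative sketch of the poll loop behind the "Waiting for the task ...
    # progress is N% ... completed successfully" entries above. Not oslo.vmware code.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it leaves the running state."""
        while True:
            info = get_task_info()                 # one property-collector round trip per poll
            if info["state"] == "success":         # logged as "completed successfully"
                return info.get("result")
            if info["state"] == "error":           # the real driver raises a task-specific error
                raise RuntimeError(info.get("error", "task failed"))
            # mirrors the "Task: {...} progress is N%" debug lines in the log
            print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)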
[ 881.476334] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Updating instance_info_cache with network_info: [{"id": "df9c8d59-f506-4a95-b90b-85b338619b4a", "address": "fa:16:3e:22:6e:63", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf9c8d59-f5", "ovs_interfaceid": "df9c8d59-f506-4a95-b90b-85b338619b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.577205] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240864, 'name': CreateSnapshot_Task, 'duration_secs': 0.781095} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.577517] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 881.578358] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f15b52-3821-4ca6-b169-d28b9cf09b0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.632541] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4f1a146d-b084-4ce9-97bd-223f4fafa051 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.562s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.643703] env[61898]: DEBUG oslo_vmware.api [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240865, 'name': PowerOnVM_Task, 'duration_secs': 0.666143} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.644012] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.644235] env[61898]: DEBUG nova.compute.manager [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 881.644990] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3455c82-edab-4f40-a5f2-e3ae374b58b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.782966] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Successfully updated port: d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 881.813939] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071735} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.814230] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.815028] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860d196c-6ebf-45a9-bfd3-b9ae80b73af5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.839242] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.842235] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-932d525a-6f61-4d8a-a2ac-dc9c773c222e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.862359] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': 
session[52794197-29f0-ea69-2b8e-12812988d1d1]523089b9-e8a9-7551-ea58-d0146dffa48b, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.863885] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.864178] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 9afa94d2-16a1-484f-96b4-8bbd93829ffe/9afa94d2-16a1-484f-96b4-8bbd93829ffe.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 881.864507] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 881.864507] env[61898]: value = "task-1240868" [ 881.864507] env[61898]: _type = "Task" [ 881.864507] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.866867] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5963b116-4340-4141-8da3-2c6ea597b1bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.875408] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240868, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.878161] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 881.878161] env[61898]: value = "task-1240869" [ 881.878161] env[61898]: _type = "Task" [ 881.878161] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.895664] env[61898]: DEBUG nova.compute.utils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.898514] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240869, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.898514] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 881.898514] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 881.943726] env[61898]: DEBUG nova.policy [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1dffec203adf453db746e6c2140283f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ed1f0593fdb4221b84147f56049153e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.978058] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.978317] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Instance network_info: |[{"id": "df9c8d59-f506-4a95-b90b-85b338619b4a", "address": "fa:16:3e:22:6e:63", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf9c8d59-f5", "ovs_interfaceid": "df9c8d59-f506-4a95-b90b-85b338619b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 881.978737] env[61898]: DEBUG 
oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Acquired lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.978984] env[61898]: DEBUG nova.network.neutron [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Refreshing network info cache for port df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.980572] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:6e:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df9c8d59-f506-4a95-b90b-85b338619b4a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.989324] env[61898]: DEBUG oslo.service.loopingcall [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 881.993303] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.994648] env[61898]: DEBUG nova.compute.manager [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Received event network-changed-d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 881.994890] env[61898]: DEBUG nova.compute.manager [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Refreshing instance network info cache due to event network-changed-d3fda812-e082-4563-9ca3-516f9e0b6e27. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 881.995130] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] Acquiring lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.995283] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] Acquired lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.995466] env[61898]: DEBUG nova.network.neutron [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Refreshing network info cache for port d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 882.000156] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffd1f8be-8408-4167-a5f2-b4a7da713504 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.025108] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.025108] env[61898]: value = "task-1240870" [ 882.025108] env[61898]: _type = "Task" [ 882.025108] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.035680] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240870, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.098865] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 882.099289] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-05588dd6-6910-4cb4-a72e-2e254b16d4f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.106819] env[61898]: DEBUG nova.network.neutron [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.110823] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 882.110823] env[61898]: value = "task-1240871" [ 882.110823] env[61898]: _type = "Task" [ 882.110823] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.122369] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240871, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.163192] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.286104] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.291471] env[61898]: INFO nova.compute.manager [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Rescuing [ 882.291889] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.292140] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.292524] env[61898]: DEBUG nova.network.neutron [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.301910] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Successfully created port: 0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.383175] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240868, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.396908] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240869, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.401731] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 882.411352] env[61898]: DEBUG nova.network.neutron [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.539310] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240870, 'name': CreateVM_Task, 'duration_secs': 0.432011} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.542562] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.543583] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.543920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.544258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 882.544523] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91d65d9-8821-4e90-8a6c-0b59338304a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.550561] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 882.550561] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a49424-07b9-78a5-941e-77099d37bbbd" [ 882.550561] env[61898]: _type = "Task" [ 
882.550561] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.571272] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a49424-07b9-78a5-941e-77099d37bbbd, 'name': SearchDatastore_Task, 'duration_secs': 0.010613} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.571483] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.572625] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.572625] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.572824] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.573095] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.577011] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b523a427-2573-4adb-8dcb-12a4150bbe73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.588277] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.588560] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.592954] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b86864b-bbfe-4574-9124-c77d1210ba89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.600738] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 882.600738] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52357b7b-b608-b5a3-dacc-bce1d4759a04" [ 882.600738] env[61898]: _type = "Task" [ 882.600738] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.609864] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52357b7b-b608-b5a3-dacc-bce1d4759a04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.622277] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240871, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.636664] env[61898]: DEBUG nova.network.neutron [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Updated VIF entry in instance network info cache for port df9c8d59-f506-4a95-b90b-85b338619b4a. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 882.636664] env[61898]: DEBUG nova.network.neutron [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Updating instance_info_cache with network_info: [{"id": "df9c8d59-f506-4a95-b90b-85b338619b4a", "address": "fa:16:3e:22:6e:63", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf9c8d59-f5", "ovs_interfaceid": "df9c8d59-f506-4a95-b90b-85b338619b4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.738125] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e15650-7d2b-452c-82da-a3ed9144e67d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.746345] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dd0419-ffba-4bed-8da9-01ac3fa9246a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.777141] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb81662d-53f3-40e3-9a18-b6115a489e6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.785432] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2342dcf6-da02-4b13-ac68-956f2c23cdba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.802362] env[61898]: DEBUG nova.compute.provider_tree [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.879257] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240868, 'name': ReconfigVM_Task, 'duration_secs': 0.706944} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.879257] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 882.879257] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce35d0da-6f01-44e9-8b94-4b5ef468ae89 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.889558] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 882.889558] env[61898]: value = "task-1240872" [ 882.889558] env[61898]: _type = "Task" [ 882.889558] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.897571] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528997} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.901186] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 9afa94d2-16a1-484f-96b4-8bbd93829ffe/9afa94d2-16a1-484f-96b4-8bbd93829ffe.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 882.901186] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 882.901444] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240872, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.901675] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85edae61-caef-4e38-9085-4d9b24e8cebc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.914978] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 882.914978] env[61898]: value = "task-1240873" [ 882.914978] env[61898]: _type = "Task" [ 882.914978] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.919882] env[61898]: DEBUG oslo_concurrency.lockutils [req-6b6ceee9-d847-44c3-93d8-ff64b353b936 req-112796f1-41bd-4098-b194-487b51d5c4ad service nova] Releasing lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.920237] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.920397] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.929368] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240873, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.112089] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52357b7b-b608-b5a3-dacc-bce1d4759a04, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.112870] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ea0d10c-a001-4f7d-9750-96aa945ae930 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.122269] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 883.122269] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525688ea-cd49-b151-bc0c-e0a4428ffe74" [ 883.122269] env[61898]: _type = "Task" [ 883.122269] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.125821] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240871, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.135522] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525688ea-cd49-b151-bc0c-e0a4428ffe74, 'name': SearchDatastore_Task, 'duration_secs': 0.011285} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.135784] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.136053] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb/57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.137027] env[61898]: DEBUG nova.network.neutron [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updating instance_info_cache with network_info: [{"id": "53551414-df45-4670-abea-be494090dd14", "address": "fa:16:3e:1f:b4:46", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53551414-df", "ovs_interfaceid": "53551414-df45-4670-abea-be494090dd14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.138265] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task 
with opID=oslo.vmware-81c54417-0612-451f-8ec2-0a6b805402f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.140746] env[61898]: DEBUG oslo_concurrency.lockutils [req-2bfd7acd-4006-41e4-893e-1ce39fd870ff req-4dc1fcb0-0f98-4fbe-91a8-2fad33fb5b48 service nova] Releasing lock "refresh_cache-57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.147745] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 883.147745] env[61898]: value = "task-1240874" [ 883.147745] env[61898]: _type = "Task" [ 883.147745] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.156091] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.305153] env[61898]: DEBUG nova.scheduler.client.report [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 883.403238] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240872, 'name': Rename_Task, 'duration_secs': 0.355997} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.403546] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 883.403947] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18031a8d-8a87-43b9-9305-e33fd1fb467a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.411603] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 883.416187] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 883.416187] env[61898]: value = "task-1240875" [ 883.416187] env[61898]: _type = "Task" [ 883.416187] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.432490] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240873, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072858} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.436325] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.436973] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.437616] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be975096-2093-4a0a-8595-8c78543160d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.448971] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.449299] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.449516] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Image 
limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.449717] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.449906] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.450095] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.450323] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.450515] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.450667] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.450832] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.451020] env[61898]: DEBUG nova.virt.hardware [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.452059] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b71e97-a0b3-4bc4-bfdc-dc93ad867853 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.475291] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 9afa94d2-16a1-484f-96b4-8bbd93829ffe/9afa94d2-16a1-484f-96b4-8bbd93829ffe.vmdk or device None 
with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.476274] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c07012ff-310e-457b-bea4-84fe75255742 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.492720] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.499402] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96192788-4686-42c0-8bb3-3e4baba32138 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.506509] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 883.506509] env[61898]: value = "task-1240876" [ 883.506509] env[61898]: _type = "Task" [ 883.506509] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.532373] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240876, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.628194] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240871, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.643466] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.657439] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240874, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.783597] env[61898]: DEBUG nova.network.neutron [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Updating instance_info_cache with network_info: [{"id": "d3fda812-e082-4563-9ca3-516f9e0b6e27", "address": "fa:16:3e:17:f1:03", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3fda812-e0", "ovs_interfaceid": "d3fda812-e082-4563-9ca3-516f9e0b6e27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.811268] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.811881] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 883.814858] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.579s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.818617] env[61898]: DEBUG nova.objects.instance [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'resources' on Instance uuid e851d73d-58f0-486a-a95c-70d07e5faad2 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.908286] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Successfully updated port: 0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.932494] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240875, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.030031] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240876, 'name': ReconfigVM_Task, 'duration_secs': 0.399194} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.030031] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 9afa94d2-16a1-484f-96b4-8bbd93829ffe/9afa94d2-16a1-484f-96b4-8bbd93829ffe.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.030031] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7741682-9238-4cd8-ac9a-05b40bc3fd71 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.037922] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 884.037922] env[61898]: value = "task-1240877" [ 884.037922] env[61898]: _type = "Task" [ 884.037922] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.046974] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240877, 'name': Rename_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.125086] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240871, 'name': CloneVM_Task, 'duration_secs': 1.832576} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.125959] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Created linked-clone VM from snapshot [ 884.126993] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea2190e-aeae-4ad7-b20b-e24331305241 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.137054] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploading image 0924447f-f2a2-454b-abe8-8a01ccf8a8b1 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 884.161232] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513276} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.163658] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 884.163658] env[61898]: value = "vm-267674" [ 884.163658] env[61898]: _type = "VirtualMachine" [ 884.163658] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 884.164138] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb/57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.164607] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.164839] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3df0b3a9-d4d2-4cb2-9331-01bad08944f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.166756] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b121d3c9-f3bd-454f-92d8-89fd5baf8724 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.178858] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 884.178858] env[61898]: value = "task-1240878" [ 884.178858] env[61898]: _type = "Task" [ 884.178858] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.179720] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease: (returnval){ [ 884.179720] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528068a6-30bc-3f8c-d0b6-1fbed7039094" [ 884.179720] env[61898]: _type = "HttpNfcLease" [ 884.179720] env[61898]: } obtained for exporting VM: (result){ [ 884.179720] env[61898]: value = "vm-267674" [ 884.179720] env[61898]: _type = "VirtualMachine" [ 884.179720] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 884.180319] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the lease: (returnval){ [ 884.180319] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528068a6-30bc-3f8c-d0b6-1fbed7039094" [ 884.180319] env[61898]: _type = "HttpNfcLease" [ 884.180319] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 884.199415] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 884.199415] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528068a6-30bc-3f8c-d0b6-1fbed7039094" [ 884.199415] env[61898]: _type = "HttpNfcLease" [ 884.199415] env[61898]: } is ready. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 884.203102] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 884.203102] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528068a6-30bc-3f8c-d0b6-1fbed7039094" [ 884.203102] env[61898]: _type = "HttpNfcLease" [ 884.203102] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 884.203456] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.204489] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18eef2c0-96df-40d5-8327-358b9b96d30c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.207693] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.207999] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.208286] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.208584] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.208780] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.211082] env[61898]: INFO nova.compute.manager [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Terminating instance [ 884.216034] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 884.216290] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 884.286880] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "refresh_cache-cdd5f647-2c43-4389-820d-2d39d7d20889" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.287500] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance network_info: |[{"id": "d3fda812-e082-4563-9ca3-516f9e0b6e27", "address": "fa:16:3e:17:f1:03", "network": {"id": "417c9ff9-1aed-4a2f-95cd-3baf6bf12936", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-259987027-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a476b83a7bda4078b4690a73adfea8c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3fda812-e0", "ovs_interfaceid": "d3fda812-e082-4563-9ca3-516f9e0b6e27", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 884.288648] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:f1:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3fda812-e082-4563-9ca3-516f9e0b6e27', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.300602] env[61898]: DEBUG oslo.service.loopingcall [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.301351] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.301683] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b977cda-cce4-45f7-80d0-520665366eb8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.318871] env[61898]: DEBUG nova.compute.utils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 884.323801] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 884.323801] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 884.328027] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0c298539-a455-47c6-9c5f-be86f27370d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.331649] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.331649] env[61898]: value = "task-1240880" [ 884.331649] env[61898]: _type = "Task" [ 884.331649] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.343035] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240880, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.358283] env[61898]: DEBUG nova.compute.manager [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Received event network-vif-plugged-0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 884.358587] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Acquiring lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.358670] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.358846] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.359046] env[61898]: DEBUG nova.compute.manager [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] No waiting events found dispatching network-vif-plugged-0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 884.359217] env[61898]: WARNING nova.compute.manager [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Received unexpected event network-vif-plugged-0d791731-b395-4858-b0b0-86de8a660e18 for instance with vm_state building and task_state spawning. [ 884.359374] env[61898]: DEBUG nova.compute.manager [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Received event network-changed-0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 884.359581] env[61898]: DEBUG nova.compute.manager [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Refreshing instance network info cache due to event network-changed-0d791731-b395-4858-b0b0-86de8a660e18. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 884.359789] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Acquiring lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.360037] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Acquired lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.362064] env[61898]: DEBUG nova.network.neutron [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Refreshing network info cache for port 0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.382872] env[61898]: DEBUG nova.policy [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c909f4306477d8fc741ab3aac9d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e8b71885c83418fb13e216f804ffeeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 884.411646] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.434023] env[61898]: DEBUG oslo_vmware.api [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240875, 'name': PowerOnVM_Task, 'duration_secs': 0.900716} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.437917] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 884.438493] env[61898]: INFO nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Took 11.04 seconds to spawn the instance on the hypervisor. 
[ 884.438629] env[61898]: DEBUG nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 884.440615] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84556a0e-f53b-4b98-89b6-5be4a04d2c24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.552793] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240877, 'name': Rename_Task, 'duration_secs': 0.160822} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.556044] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.556520] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6719957c-008b-42a4-9170-426bdb6a7347 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.565938] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 884.565938] env[61898]: value = "task-1240881" [ 884.565938] env[61898]: _type = "Task" [ 884.565938] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.583740] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240881, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.674607] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Successfully created port: dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.694959] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.695356] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240878, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.698234] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3711b387-f3c4-40f3-b7c6-2521b9c83cd6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.708664] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 884.708664] env[61898]: value = "task-1240882" [ 884.708664] env[61898]: _type = "Task" [ 884.708664] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.720884] env[61898]: DEBUG nova.compute.manager [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 884.721237] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.728269] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ec8723-83ca-4c1a-acab-2807671d7070 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.732628] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240882, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.736233] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc8a6484-a581-47f5-9f11-978f3a6f26dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.749263] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ce68ff-5d79-4266-b64c-7e93bf7f1936 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.755328] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.755773] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c8792da-8033-4cf3-85de-396a2f2cfac9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.796924] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5eba439-fd86-4083-b594-593567e0ffb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.800392] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 884.800392] env[61898]: value = "task-1240883" [ 884.800392] env[61898]: _type = "Task" [ 884.800392] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.808700] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcd2ef2-7b6b-4e16-a46a-f41aef7da7f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.817603] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.832151] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 884.835110] env[61898]: DEBUG nova.compute.provider_tree [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.847409] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240880, 'name': CreateVM_Task} progress is 25%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.911017] env[61898]: DEBUG nova.network.neutron [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.968161] env[61898]: INFO nova.compute.manager [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Took 27.38 seconds to build instance. [ 885.073214] env[61898]: DEBUG nova.network.neutron [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.080986] env[61898]: DEBUG oslo_vmware.api [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240881, 'name': PowerOnVM_Task, 'duration_secs': 0.510989} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.081650] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.081650] env[61898]: INFO nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Took 9.30 seconds to spawn the instance on the hypervisor. [ 885.082033] env[61898]: DEBUG nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 885.082856] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b90b53-7798-485a-89c2-83a70be6ae05 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.196522] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240878, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.221110] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240882, 'name': PowerOffVM_Task, 'duration_secs': 0.321364} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.223302] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.228449] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf7b895-71b2-4c07-8697-e976415ed48e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.251801] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9527522-235e-429c-8fe8-97a20597d0f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.292805] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.293925] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfae9f58-bf49-4880-b4fa-d0e69c55f91e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.305896] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 885.305896] env[61898]: value = "task-1240884" [ 885.305896] env[61898]: _type = "Task" [ 885.305896] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.313503] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240883, 'name': PowerOffVM_Task, 'duration_secs': 0.310148} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.314314] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.314602] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.314958] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c2da792-c6f7-409b-bcda-0db336428b12 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.320750] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 885.321059] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.321392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.321602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.321807] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.322147] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea94defa-7fd7-4ae6-bbeb-e570749e89ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.342991] env[61898]: DEBUG nova.scheduler.client.report [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for 
provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 885.345657] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.345911] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.353278] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e68b32-dcb0-4f2e-bd94-38ebc54946ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.362614] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240880, 'name': CreateVM_Task, 'duration_secs': 0.649462} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.364621] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.364621] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 885.364621] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f2dbd1-231d-5e72-3536-dabd6f2758ae" [ 885.364621] env[61898]: _type = "Task" [ 885.364621] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.365480] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.366370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.366370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 885.367184] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ee8cef-84e0-4499-aa66-b1f557038d3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.377718] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 885.377718] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e4e3-041e-8905-c0f7-5b843faedf29" [ 885.377718] env[61898]: _type = "Task" [ 885.377718] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.384592] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f2dbd1-231d-5e72-3536-dabd6f2758ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.399471] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e4e3-041e-8905-c0f7-5b843faedf29, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.406152] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.406696] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.407165] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore2] e5c38d18-18e4-47dc-8445-71d3dc0c325a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.408207] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca20adaf-0ee9-4f7c-a89a-c7bf97815c04 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.420973] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 885.420973] env[61898]: value = "task-1240886" [ 885.420973] env[61898]: _type = "Task" [ 885.420973] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.433481] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240886, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.469835] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e3e2ac6-620b-4fb8-a3b9-26754171ebb8 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.909s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.576459] env[61898]: DEBUG oslo_concurrency.lockutils [req-fe2038ab-a60c-40a4-be67-dd9e6d5e8579 req-6c891679-cc61-4b7f-87de-4d61a90a8182 service nova] Releasing lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.576459] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquired lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.576946] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.604041] env[61898]: INFO nova.compute.manager [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Took 27.07 seconds to build instance. [ 885.696838] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.224743} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.697288] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.698280] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0166f887-b058-4b98-8036-04c37c7b5bd3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.723310] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb/57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.723761] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b2f3807-5640-4a13-9612-e0392521fb13 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.739793] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.739793] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.740027] env[61898]: INFO nova.compute.manager [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Shelving [ 885.748831] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 885.748831] env[61898]: value = "task-1240887" [ 885.748831] env[61898]: _type = "Task" [ 885.748831] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.759603] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240887, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.849583] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 885.852262] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.854891] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.617s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.855198] env[61898]: DEBUG nova.objects.instance [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 885.882321] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f2dbd1-231d-5e72-3536-dabd6f2758ae, 'name': SearchDatastore_Task, 'duration_secs': 0.020753} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.885908] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25fd8901-5310-4790-9a71-833f099c3357 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.894263] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 885.894592] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 885.894771] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.894973] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 885.895333] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.895679] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 885.896171] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 885.896409] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Build topologies 
for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 885.896726] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 885.896992] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 885.897250] env[61898]: DEBUG nova.virt.hardware [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 885.898215] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c76f7c-40b0-4fcf-a00f-22cf763f6d87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.902735] env[61898]: INFO nova.scheduler.client.report [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted allocations for instance e851d73d-58f0-486a-a95c-70d07e5faad2 [ 885.916268] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e4e3-041e-8905-c0f7-5b843faedf29, 'name': SearchDatastore_Task, 'duration_secs': 0.022447} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.917052] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 885.917052] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e81c96-32e1-9687-3f48-5042c65d17b3" [ 885.917052] env[61898]: _type = "Task" [ 885.917052] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.920467] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.920989] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.921408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.926220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1424c304-9e22-4204-98a6-2bcf1f1f00b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.942653] env[61898]: DEBUG oslo_vmware.api [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240886, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373195} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.954634] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.954894] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.955189] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.955449] env[61898]: INFO nova.compute.manager [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Took 1.23 seconds to destroy the instance on the hypervisor. 
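[editor annotation, not part of the captured log] The PowerOnVM_Task / PowerOffVM_Task / DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll sequence: "Invoking VirtualMachine.PowerOnVM_Task", then "Waiting for the task", repeated "progress is N%" polls, and finally "completed successfully". A minimal sketch of that sequence, assuming an already-created oslo_vmware.api.VMwareAPISession is passed in and vm_ref is a VirtualMachine managed object reference obtained elsewhere:

    # Sketch only: mirrors the task-polling sequence visible in the log.
    def power_on(session, vm_ref):
        # Issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task")
        # and returns a Task managed object reference.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task ("Task: {...} progress is N%")
        # until vCenter reports success, raising on task error.
        return session.wait_for_task(task_ref)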
[ 885.955807] env[61898]: DEBUG oslo.service.loopingcall [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.956734] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e81c96-32e1-9687-3f48-5042c65d17b3, 'name': SearchDatastore_Task, 'duration_secs': 0.013314} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.957013] env[61898]: DEBUG nova.compute.manager [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 885.957138] env[61898]: DEBUG nova.network.neutron [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.959154] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.959700] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. 
{{(pid=61898) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 885.960132] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.960475] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.960715] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d62c29b7-fe48-4b7f-996e-5fdb8f824de4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.964517] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da110862-f202-4646-8ead-5245b22857cb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.974532] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 885.974532] env[61898]: value = "task-1240888" [ 885.974532] env[61898]: _type = "Task" [ 885.974532] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.979801] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.980119] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.981490] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16b726d4-deb8-4c7e-ad2f-552aa8d791db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.988868] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240888, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.993014] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 885.993014] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ada0c8-d3d7-70ec-f34c-516b82d927e1" [ 885.993014] env[61898]: _type = "Task" [ 885.993014] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.003042] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ada0c8-d3d7-70ec-f34c-516b82d927e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.108164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-461f2d59-da82-4621-ae3f-7980490c2889 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.583s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.173534] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.273917] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240887, 'name': ReconfigVM_Task, 'duration_secs': 0.331499} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.273917] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Successfully updated port: dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.274425] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb/57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 886.274927] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d78696f-f838-44bd-8b47-ae408a04cf70 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.286122] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 886.286122] env[61898]: value = "task-1240889" [ 886.286122] env[61898]: _type = "Task" [ 886.286122] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.304765] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240889, 'name': Rename_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.368026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.368026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.368026] env[61898]: DEBUG nova.compute.manager [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 886.368026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1774bf80-c82b-49ea-adf7-a1d6d2089ef8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.380642] env[61898]: DEBUG nova.compute.manager [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 886.381484] env[61898]: DEBUG nova.objects.instance [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'flavor' on Instance uuid 9afa94d2-16a1-484f-96b4-8bbd93829ffe {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.422609] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a65a8703-14ee-4ab2-b5ec-782497a80983 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "e851d73d-58f0-486a-a95c-70d07e5faad2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.131s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.426405] env[61898]: DEBUG oslo_concurrency.lockutils [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] Acquired lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.426405] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148104dc-f06d-490b-ba07-365161a04be7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.437567] env[61898]: WARNING suds.client [-] Web service reported a SOAP processing 
fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 886.440148] env[61898]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=61898) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 886.440148] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f59b8fa-8832-44a2-9224-aceae561941c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.452209] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3306144e-b191-46b4-b365-09050a9d2021 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.494608] env[61898]: ERROR root [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-267617' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-267617' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-267617' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-267617'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in 
func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-267617' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-267617' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-267617'}\n"]: nova.exception.InstanceNotFound: Instance e851d73d-58f0-486a-a95c-70d07e5faad2 could not be found. [ 886.494928] env[61898]: DEBUG oslo_concurrency.lockutils [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] Releasing lock "e851d73d-58f0-486a-a95c-70d07e5faad2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.495235] env[61898]: DEBUG nova.compute.manager [req-621c053b-3c48-419c-ab4e-ed95774d4699 req-f2530535-325a-4873-9dcd-cf7e2552dcec service nova] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Detach interface failed, port_id=8658c19e-7e0e-473b-a26d-7bb0da23b75f, reason: Instance e851d73d-58f0-486a-a95c-70d07e5faad2 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 886.500627] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240888, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.514273] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ada0c8-d3d7-70ec-f34c-516b82d927e1, 'name': SearchDatastore_Task, 'duration_secs': 0.014194} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.515217] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e66524-d53c-437a-86d0-1c0c8f95ba5c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.525258] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 886.525258] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a83b3a-2ba2-081f-4ed3-8ed8d10b3caf" [ 886.525258] env[61898]: _type = "Task" [ 886.525258] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.536923] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a83b3a-2ba2-081f-4ed3-8ed8d10b3caf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.640781] env[61898]: DEBUG nova.compute.manager [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Received event network-vif-plugged-dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 886.640987] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Acquiring lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.641644] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.641961] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.642290] env[61898]: DEBUG nova.compute.manager [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] No waiting events found dispatching network-vif-plugged-dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.642617] env[61898]: WARNING nova.compute.manager [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Received unexpected event network-vif-plugged-dfa99f09-26b3-43d9-8c91-58911fb6fcd9 for instance with vm_state building and task_state spawning. [ 886.642934] env[61898]: DEBUG nova.compute.manager [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Received event network-changed-dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 886.643299] env[61898]: DEBUG nova.compute.manager [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Refreshing instance network info cache due to event network-changed-dfa99f09-26b3-43d9-8c91-58911fb6fcd9. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 886.643572] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Acquiring lock "refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.643791] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Acquired lock "refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.644049] env[61898]: DEBUG nova.network.neutron [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Refreshing network info cache for port dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 886.658522] env[61898]: DEBUG nova.network.neutron [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Updating instance_info_cache with network_info: [{"id": "0d791731-b395-4858-b0b0-86de8a660e18", "address": "fa:16:3e:7b:27:63", "network": {"id": "24787caa-0ab1-4d06-b31c-d9910fc68de3", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1660262411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ed1f0593fdb4221b84147f56049153e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d791731-b3", "ovs_interfaceid": "0d791731-b395-4858-b0b0-86de8a660e18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.767033] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 886.767033] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab756676-0ffe-4304-88c1-9e72aa66b7c7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.776066] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock 
"refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.776066] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 886.776066] env[61898]: value = "task-1240890" [ 886.776066] env[61898]: _type = "Task" [ 886.776066] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.787639] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.800875] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240889, 'name': Rename_Task, 'duration_secs': 0.209955} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.800875] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.801694] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c1c7a82-5b0f-4b30-ac31-bb1ebfa90765 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.812035] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 886.812035] env[61898]: value = "task-1240891" [ 886.812035] env[61898]: _type = "Task" [ 886.812035] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.821444] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240891, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.824272] env[61898]: DEBUG nova.compute.manager [req-c99c6a4a-9c87-4823-8e47-b79be41782d7 req-8c2fc8f7-d74f-41e4-b1e9-dee5f0393807 service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Received event network-vif-deleted-231d39d3-2188-4318-a44d-7fbd419d0624 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 886.824367] env[61898]: INFO nova.compute.manager [req-c99c6a4a-9c87-4823-8e47-b79be41782d7 req-8c2fc8f7-d74f-41e4-b1e9-dee5f0393807 service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Neutron deleted interface 231d39d3-2188-4318-a44d-7fbd419d0624; detaching it from the instance and deleting it from the info cache [ 886.824484] env[61898]: DEBUG nova.network.neutron [req-c99c6a4a-9c87-4823-8e47-b79be41782d7 req-8c2fc8f7-d74f-41e4-b1e9-dee5f0393807 service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.868084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8d6a7bdd-ca51-46bd-91bd-f1cd9413d570 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.869210] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.168s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.869553] env[61898]: DEBUG nova.objects.instance [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lazy-loading 'resources' on Instance uuid 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.991712] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240888, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688924} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.992099] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. 
[ 886.993642] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01eb597-5599-4229-9cf9-ea9a4e0b74e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.025370] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.025798] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17c53f47-7bdf-4e48-9aa7-b45988214b20 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.051526] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a83b3a-2ba2-081f-4ed3-8ed8d10b3caf, 'name': SearchDatastore_Task, 'duration_secs': 0.053762} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.053294] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.053669] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cdd5f647-2c43-4389-820d-2d39d7d20889/cdd5f647-2c43-4389-820d-2d39d7d20889.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 887.054114] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 887.054114] env[61898]: value = "task-1240892" [ 887.054114] env[61898]: _type = "Task" [ 887.054114] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.054359] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-433be300-acab-47ae-a8e7-9108087ed67c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.068610] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240892, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.070395] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 887.070395] env[61898]: value = "task-1240893" [ 887.070395] env[61898]: _type = "Task" [ 887.070395] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.080283] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.129048] env[61898]: DEBUG nova.network.neutron [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.161344] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Releasing lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.161735] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance network_info: |[{"id": "0d791731-b395-4858-b0b0-86de8a660e18", "address": "fa:16:3e:7b:27:63", "network": {"id": "24787caa-0ab1-4d06-b31c-d9910fc68de3", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1660262411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ed1f0593fdb4221b84147f56049153e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d791731-b3", "ovs_interfaceid": "0d791731-b395-4858-b0b0-86de8a660e18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 887.162711] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:27:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ce17e10e-2fb0-4191-afee-e2b89fa15074', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d791731-b395-4858-b0b0-86de8a660e18', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 887.174415] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Creating folder: Project (2ed1f0593fdb4221b84147f56049153e). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.175745] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fe74d97-db09-41bf-a8c6-3fda320c7cd3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.191356] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Created folder: Project (2ed1f0593fdb4221b84147f56049153e) in parent group-v267550. [ 887.191786] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Creating folder: Instances. Parent ref: group-v267676. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 887.192717] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4576bc7-e5a3-4d9e-b02c-f4f4d72c83e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.207624] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Created folder: Instances in parent group-v267676. [ 887.208081] env[61898]: DEBUG oslo.service.loopingcall [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 887.208582] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 887.208681] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76b03d12-fb2c-4984-bcb3-f9a88be4c521 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.236925] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 887.236925] env[61898]: value = "task-1240896" [ 887.236925] env[61898]: _type = "Task" [ 887.236925] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.255359] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240896, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.256458] env[61898]: DEBUG nova.network.neutron [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.297030] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240890, 'name': PowerOffVM_Task, 'duration_secs': 0.279286} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.297780] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.298811] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618c285c-076a-4c54-a0d0-76c1a471f49c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.330036] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2e8621-9b36-4e52-9671-a8e66f21cfe7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.333394] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c61b6fee-be46-459f-903a-24c64f345c05 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.354520] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240891, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.358584] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d58420-6464-432d-963a-888034d20d2d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.392647] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 887.394645] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4407fad4-f9c9-457a-806d-bbbd7cfe1c7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.409724] env[61898]: DEBUG nova.compute.manager [req-c99c6a4a-9c87-4823-8e47-b79be41782d7 req-8c2fc8f7-d74f-41e4-b1e9-dee5f0393807 service nova] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Detach interface failed, port_id=231d39d3-2188-4318-a44d-7fbd419d0624, reason: Instance e5c38d18-18e4-47dc-8445-71d3dc0c325a could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 887.420244] env[61898]: DEBUG oslo_vmware.api [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 887.420244] env[61898]: value = "task-1240897" [ 887.420244] env[61898]: _type = "Task" [ 887.420244] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.439465] env[61898]: DEBUG oslo_vmware.api [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.576899] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240892, 'name': ReconfigVM_Task, 'duration_secs': 0.454522} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.581600] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 887.581988] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8b21a5-276e-409a-9b1a-8490d507b351 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.615577] env[61898]: DEBUG nova.network.neutron [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.624530] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b64fa8a8-3aeb-4ea2-9abc-a1c836cf1e9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.638525] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240893, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.640143] env[61898]: INFO nova.compute.manager [-] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Took 1.68 seconds to deallocate network for instance. [ 887.656441] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 887.656441] env[61898]: value = "task-1240898" [ 887.656441] env[61898]: _type = "Task" [ 887.656441] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.668774] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240898, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.748332] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240896, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.810474] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db71d96a-7803-483f-b0a7-710ccc574f54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.822355] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750d4e20-bc19-429f-869a-6759ada71b0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.859023] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3262397-95db-4404-927d-1f752bd16ea7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.863194] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 887.863533] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240891, 'name': PowerOnVM_Task, 'duration_secs': 0.591265} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.863785] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6e3fb218-c3d4-4f8d-a82f-1ba3cf87ca64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.865927] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 887.866150] env[61898]: INFO nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 887.866333] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 887.867138] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1a206c-dc4d-4362-8b3e-a285703465ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.874467] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a14f5e-6a45-46ad-9250-1c49b0cd9aa0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.880916] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 887.880916] env[61898]: value = "task-1240899" [ 887.880916] env[61898]: _type = "Task" [ 887.880916] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.896857] env[61898]: DEBUG nova.compute.provider_tree [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.904589] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240899, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.931187] env[61898]: DEBUG oslo_vmware.api [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240897, 'name': PowerOffVM_Task, 'duration_secs': 0.31267} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.931606] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 887.931724] env[61898]: DEBUG nova.compute.manager [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 887.932445] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32cdec8-83e2-4b65-9823-dc213302cef3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.088777] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240893, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560014} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.089205] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cdd5f647-2c43-4389-820d-2d39d7d20889/cdd5f647-2c43-4389-820d-2d39d7d20889.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.090028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.090028] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d02115c5-86a0-4524-84ef-af800996a4b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.098351] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 888.098351] env[61898]: value = "task-1240900" [ 888.098351] env[61898]: _type = "Task" [ 888.098351] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.109647] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240900, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.142281] env[61898]: DEBUG oslo_concurrency.lockutils [req-48f2d8d9-27d0-47f3-9118-e9836493f869 req-425d8729-0e62-4562-a6ff-73962a718cd5 service nova] Releasing lock "refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.142281] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.142281] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.159228] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.173193] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240898, 'name': ReconfigVM_Task, 'duration_secs': 0.358431} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.173525] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.173795] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f3e5f10-e9fe-43b3-952f-ce8ce685c89f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.184051] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 888.184051] env[61898]: value = "task-1240901" [ 888.184051] env[61898]: _type = "Task" [ 888.184051] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.193008] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.249958] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240896, 'name': CreateVM_Task, 'duration_secs': 0.558617} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.249958] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.250294] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.250468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.250848] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 888.251145] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f1fa5d-1fd5-4989-ae68-c4069067a95a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.256797] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 888.256797] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52191bc4-df24-6a13-6e51-650dbbd18efb" [ 888.256797] env[61898]: _type = "Task" [ 888.256797] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.266572] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52191bc4-df24-6a13-6e51-650dbbd18efb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.288655] env[61898]: DEBUG nova.compute.manager [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Stashing vm_state: active {{(pid=61898) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 888.402479] env[61898]: DEBUG nova.scheduler.client.report [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 888.406021] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240899, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.406578] env[61898]: INFO nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Took 29.77 seconds to build instance. [ 888.444622] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8c713fc0-ceca-4b50-afce-9e703187b52b tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.079s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.610305] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240900, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074293} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.610620] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.611546] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a0282d-e171-4b51-be93-1346eadf8269 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.636422] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] cdd5f647-2c43-4389-820d-2d39d7d20889/cdd5f647-2c43-4389-820d-2d39d7d20889.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.636822] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46ce8a54-378e-4950-ab17-424cf924f799 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.660508] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 888.660508] env[61898]: value = "task-1240902" [ 888.660508] env[61898]: _type = "Task" [ 888.660508] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.669955] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240902, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.695440] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240901, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.701711] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.771031] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52191bc4-df24-6a13-6e51-650dbbd18efb, 'name': SearchDatastore_Task, 'duration_secs': 0.011152} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.771420] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.771747] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.772070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.772298] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.772555] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.772887] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4247d1b3-918b-43d5-a768-f1e00fafe7b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.785216] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.785533] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.786414] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42e8231-b5b7-4efa-9cad-9873e3ca3f64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.798480] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 888.798480] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ba6dc2-cc89-501a-daa0-097dd1b94edf" [ 888.798480] env[61898]: _type = "Task" [ 888.798480] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.811630] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ba6dc2-cc89-501a-daa0-097dd1b94edf, 'name': SearchDatastore_Task, 'duration_secs': 0.010776} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.812812] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.814167] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b6da801-8f7d-4af3-afca-e96a886b3b8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.821157] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 888.821157] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02a1d-66bb-0f65-b83b-235104db545e" [ 888.821157] env[61898]: _type = "Task" [ 888.821157] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.830896] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02a1d-66bb-0f65-b83b-235104db545e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.900083] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240899, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.909269] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.040s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.911722] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.289s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.912393] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.215s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.912727] env[61898]: DEBUG nova.objects.instance [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'resources' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 888.933558] env[61898]: INFO nova.scheduler.client.report [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted allocations for instance 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5 [ 889.018591] env[61898]: DEBUG nova.network.neutron [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Updating instance_info_cache with network_info: [{"id": "dfa99f09-26b3-43d9-8c91-58911fb6fcd9", "address": "fa:16:3e:94:ab:75", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfa99f09-26", "ovs_interfaceid": "dfa99f09-26b3-43d9-8c91-58911fb6fcd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 889.174947] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240902, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.197673] env[61898]: DEBUG oslo_vmware.api [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240901, 'name': PowerOnVM_Task, 'duration_secs': 0.638757} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.198101] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.201531] env[61898]: DEBUG nova.compute.manager [None req-6b68ef5c-c38e-4246-bb14-473ed43b72ca tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 889.202567] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf388868-15f4-49db-afb5-8e845c2024a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.333419] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02a1d-66bb-0f65-b83b-235104db545e, 'name': SearchDatastore_Task, 'duration_secs': 0.010331} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.333715] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.333983] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 11ca5129-0dc3-44b3-8f7b-215c93dac764/11ca5129-0dc3-44b3-8f7b-215c93dac764.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.334263] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a76a3ec-fa72-47f8-a5b5-51cb69468da2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.342235] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 889.342235] env[61898]: value = "task-1240903" [ 889.342235] env[61898]: _type = "Task" [ 889.342235] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.351636] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.401211] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240899, 'name': CreateSnapshot_Task, 'duration_secs': 1.034339} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.401574] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 889.402533] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b78704-3d20-47df-a1c9-ff760ec7f37b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.417183] env[61898]: DEBUG nova.objects.instance [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'numa_topology' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.443080] env[61898]: DEBUG oslo_concurrency.lockutils [None req-46634ce9-544f-4604-b1ef-422035189754 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.754s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.521801] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "refresh_cache-cf428138-4d0d-43bf-a654-06a62a82c9a1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.522177] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance network_info: |[{"id": "dfa99f09-26b3-43d9-8c91-58911fb6fcd9", "address": "fa:16:3e:94:ab:75", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfa99f09-26", "ovs_interfaceid": "dfa99f09-26b3-43d9-8c91-58911fb6fcd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 889.522757] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d 
tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:ab:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfa99f09-26b3-43d9-8c91-58911fb6fcd9', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 889.531108] env[61898]: DEBUG oslo.service.loopingcall [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 889.532051] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 889.532051] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cecdc78-dd1d-436c-b1af-b6dfc63a7eea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.557645] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.557645] env[61898]: value = "task-1240904" [ 889.557645] env[61898]: _type = "Task" [ 889.557645] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.567376] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240904, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.677800] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240902, 'name': ReconfigVM_Task, 'duration_secs': 0.589493} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.678284] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Reconfigured VM instance instance-00000055 to attach disk [datastore1] cdd5f647-2c43-4389-820d-2d39d7d20889/cdd5f647-2c43-4389-820d-2d39d7d20889.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.679168] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1f55a36-224a-420d-a727-d48e01250e21 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.704950] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 889.704950] env[61898]: value = "task-1240905" [ 889.704950] env[61898]: _type = "Task" [ 889.704950] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.722708] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240905, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.811041] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.811247] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.853714] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240903, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.927324] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 889.927888] env[61898]: DEBUG nova.objects.base [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Object Instance<070bc0cc-ff77-48b8-bd08-f17fe69e25af> lazy-loaded attributes: resources,numa_topology {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 889.930850] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c9bf4a8f-c8ee-4912-b563-31c661e0df83 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.940571] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 889.940571] env[61898]: value = "task-1240906" [ 889.940571] env[61898]: _type = "Task" [ 889.940571] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.950858] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240906, 'name': CloneVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.070028] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240904, 'name': CreateVM_Task, 'duration_secs': 0.483151} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.070192] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 890.071044] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.071348] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.071865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 890.075212] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d45e2d2-1f79-4e66-8844-eea51c978120 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.077872] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.078145] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.078368] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.078580] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.078760] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.081206] env[61898]: INFO nova.compute.manager [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Terminating instance [ 890.089471] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 890.089471] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52fcb70b-1301-30ab-a351-fe73c1f1bfdb" [ 890.089471] env[61898]: _type = "Task" [ 890.089471] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.104557] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fcb70b-1301-30ab-a351-fe73c1f1bfdb, 'name': SearchDatastore_Task, 'duration_secs': 0.011141} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.104976] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.105292] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.105550] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.105717] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.105901] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.106875] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d053cca-1b91-4d5d-a592-eb6b79d0022c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.115778] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.116087] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.117045] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf4a77f7-848d-4468-aeed-3ff3013f2d58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.125275] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 890.125275] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520531b5-f4cc-8d7f-1d29-d57cca735e51" [ 890.125275] env[61898]: _type = "Task" [ 890.125275] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.142568] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520531b5-f4cc-8d7f-1d29-d57cca735e51, 'name': SearchDatastore_Task, 'duration_secs': 0.011322} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.146469] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e7d1b2-9805-4985-b17f-47052c7a152e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.154103] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 890.154103] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521fd737-cf31-4b30-44e4-9cbc24a1891b" [ 890.154103] env[61898]: _type = "Task" [ 890.154103] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.165351] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521fd737-cf31-4b30-44e4-9cbc24a1891b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.218360] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240905, 'name': Rename_Task, 'duration_secs': 0.245665} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.218746] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.219035] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-532b4aa2-bd48-4c99-bf85-26bc7c250c6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.228523] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 890.228523] env[61898]: value = "task-1240907" [ 890.228523] env[61898]: _type = "Task" [ 890.228523] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.240952] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.261159] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afadc6f-9949-42b5-a0a3-480bddcd997a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.270525] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013c3dc7-256c-424d-9617-0848b9fee8c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.303527] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df482e76-cb8f-4685-bdba-39d9a47d120c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.314048] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b7efc1-4987-4de8-aef3-bcf89c574c54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.319398] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 890.334514] env[61898]: DEBUG nova.compute.provider_tree [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.353497] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240903, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.453877] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240906, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.586718] env[61898]: DEBUG nova.compute.manager [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 890.587017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.588131] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73c43b2-f163-4fa4-b908-7145c5b1186a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.611865] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 890.611865] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-880b7c65-42dc-46a4-a883-78098f16e745 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.666273] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521fd737-cf31-4b30-44e4-9cbc24a1891b, 'name': SearchDatastore_Task, 'duration_secs': 0.01539} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.669045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.669045] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cf428138-4d0d-43bf-a654-06a62a82c9a1/cf428138-4d0d-43bf-a654-06a62a82c9a1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.670359] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-610eaa95-b4da-491f-9fdb-eb615f3765ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.681084] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 890.681084] env[61898]: value = "task-1240909" [ 890.681084] env[61898]: _type = "Task" [ 890.681084] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.692928] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240909, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.699231] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.699499] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.699707] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleting the datastore file [datastore2] 9afa94d2-16a1-484f-96b4-8bbd93829ffe {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.699996] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a81850dc-b973-4658-9e12-3ebb91b0b7bb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.716630] env[61898]: DEBUG oslo_vmware.api [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 890.716630] env[61898]: value = "task-1240910" [ 890.716630] env[61898]: _type = "Task" [ 890.716630] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.727479] env[61898]: DEBUG oslo_vmware.api [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240910, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.738993] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240907, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.839724] env[61898]: DEBUG nova.scheduler.client.report [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 890.859394] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240903, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.499262} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.860684] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 11ca5129-0dc3-44b3-8f7b-215c93dac764/11ca5129-0dc3-44b3-8f7b-215c93dac764.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.860955] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.862112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 890.862375] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be1f25a2-319b-4737-986e-efd6e5aa5c7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.871674] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 890.871674] env[61898]: value = "task-1240911" [ 890.871674] env[61898]: _type = "Task" [ 890.871674] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.883295] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240911, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.955084] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240906, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.178331] env[61898]: INFO nova.compute.manager [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Unrescuing [ 891.178331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.178331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquired lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.178528] env[61898]: DEBUG nova.network.neutron [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.196089] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240909, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.230925] env[61898]: DEBUG oslo_vmware.api [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1240910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317882} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.235497] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 891.235924] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 891.236387] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 891.236712] env[61898]: INFO nova.compute.manager [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Took 0.65 seconds to destroy the instance on the hypervisor. [ 891.237041] env[61898]: DEBUG oslo.service.loopingcall [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 891.237758] env[61898]: DEBUG nova.compute.manager [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 891.237949] env[61898]: DEBUG nova.network.neutron [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.249483] env[61898]: DEBUG oslo_vmware.api [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240907, 'name': PowerOnVM_Task, 'duration_secs': 0.796238} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.249843] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.250113] env[61898]: INFO nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Took 10.41 seconds to spawn the instance on the hypervisor. 
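Note on the task records above: the PowerOnVM_Task entries for task-1240907 (progress 0% → 88% → "completed successfully") are produced by oslo.vmware's invoke-then-poll pattern: the driver calls an asynchronous vCenter *_Task method via the session, then blocks in wait_for_task, whose _poll_task loop logs the progress lines seen here. The following is a minimal sketch of that pattern against the public oslo.vmware API; the vCenter host, credentials, retry/poll settings, and the pre-resolved vm_ref are illustrative placeholders, not values from this log.

# Sketch of the oslo.vmware invoke/wait pattern behind the
# "Task: {...} progress is N% ... completed successfully" records.
# Host, credentials and vm_ref are placeholders, not taken from this run.
from oslo_vmware import api as vmware_api


def power_on_vm(session, vm_ref):
    # Invoke the asynchronous vCenter method; it returns a Task moref
    # immediately (analogous to 'task-1240907' in the log above).
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task (logging "progress is N%" each cycle)
    # and returns the task info on success, raising on task failure.
    return session.wait_for_task(task_ref)


if __name__ == '__main__':
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',           # placeholder vCenter host
        'administrator@vsphere.local',   # placeholder user
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)
    # vm_ref would normally be resolved with a PropertyCollector query;
    # it is assumed to be available here.
    # power_on_vm(session, vm_ref)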
[ 891.250312] env[61898]: DEBUG nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 891.251302] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acefe1f-ba52-4003-86ef-da79d7d30182 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.347594] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.435s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.350992] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.821s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.355201] env[61898]: DEBUG nova.objects.instance [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid 01685478-9d68-4edd-8dff-7d63fcd8bcd3 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 891.387047] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165662} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.387571] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.388552] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c2a28f-0fdb-46d3-8f37-d96874cfc5a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.416152] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 11ca5129-0dc3-44b3-8f7b-215c93dac764/11ca5129-0dc3-44b3-8f7b-215c93dac764.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.417300] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36456cc5-d5e9-4b15-b817-0e02ed3c469d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.441614] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 891.441614] env[61898]: value = "task-1240912" [ 891.441614] env[61898]: _type = "Task" [ 891.441614] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.459628] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240906, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.463393] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240912, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.695514] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240909, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726093} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.695887] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cf428138-4d0d-43bf-a654-06a62a82c9a1/cf428138-4d0d-43bf-a654-06a62a82c9a1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.696152] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.696445] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09df4880-b967-4248-9fbb-0dafaf62e1ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.705870] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 891.705870] env[61898]: value = "task-1240913" [ 891.705870] env[61898]: _type = "Task" [ 891.705870] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.719383] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.772355] env[61898]: INFO nova.compute.manager [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Took 33.10 seconds to build instance. 
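Note on the lock records in this section: the recurring "Acquiring lock ... acquired ... released" triplets (for example the "compute_resources" and per-image "[datastore1] devstack-image-cache_base/..." locks) are emitted by oslo.concurrency's lockutils wrapper, which logs waited/held times at DEBUG. Below is a minimal sketch of the two forms it supports; the lock names and options shown are illustrative assumptions, not copied from this run.

# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... acquired ... released" records. Lock names and
# options are illustrative, not taken from this log.
from oslo_concurrency import lockutils


# Decorator form: serializes all callers that share the same lock name,
# logging acquire/release (with waited/held durations) at DEBUG level.
@lockutils.synchronized('compute_resources', fair=True)
def update_usage():
    # ... resource-tracker style bookkeeping would go here ...
    pass


def refresh_image_cache(cache_entry):
    # Context-manager form, as used around per-image cache entries such as
    # "[datastore1] devstack-image-cache_base/<image-id>".
    with lockutils.lock(cache_entry):
        # ... fetch or reuse the cached VMDK while holding the lock ...
        pass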
[ 891.863142] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0af8b637-848e-4c9c-8d43-c7155174aea8 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 39.687s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.864607] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 16.593s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.864899] env[61898]: INFO nova.compute.manager [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Unshelving [ 891.957719] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240912, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.965359] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240906, 'name': CloneVM_Task, 'duration_secs': 1.950847} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.965757] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Created linked-clone VM from snapshot [ 891.966865] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d857a19e-c7d4-4d32-bea7-90d972c3d140 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.982741] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Uploading image 038581bd-8ae3-45c6-8697-83c7fb01abff {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 892.020221] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 892.020221] env[61898]: value = "vm-267681" [ 892.020221] env[61898]: _type = "VirtualMachine" [ 892.020221] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 892.020636] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-cb3231eb-68c9-46f1-b1ec-6fabb61b3ce9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.031747] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lease: (returnval){ [ 892.031747] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b61ab8-0c3f-f702-9e09-5610e1ec0f7f" [ 892.031747] env[61898]: _type = "HttpNfcLease" [ 892.031747] env[61898]: } obtained for exporting VM: (result){ [ 892.031747] env[61898]: value = "vm-267681" [ 892.031747] env[61898]: _type = "VirtualMachine" [ 892.031747] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 892.035058] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the lease: (returnval){ [ 892.035058] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b61ab8-0c3f-f702-9e09-5610e1ec0f7f" [ 892.035058] env[61898]: _type = "HttpNfcLease" [ 892.035058] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 892.046764] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 892.046764] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b61ab8-0c3f-f702-9e09-5610e1ec0f7f" [ 892.046764] env[61898]: _type = "HttpNfcLease" [ 892.046764] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 892.219591] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094932} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.219591] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.222136] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25418a6a-7621-4c89-a2fa-a35818e4b39f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.248094] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] cf428138-4d0d-43bf-a654-06a62a82c9a1/cf428138-4d0d-43bf-a654-06a62a82c9a1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.250153] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc3da238-2e9b-4b28-b53a-931edda59d09 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.265804] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78729031-71e9-4503-8748-706de5ab8bcb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.276284] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb0f105-9083-432d-bd07-94f98684515e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.281665] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a3b4f62b-9971-48ea-a00f-656bde9fa2a3 tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 34.623s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.282428] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 892.282428] env[61898]: value = "task-1240915" [ 892.282428] env[61898]: _type = "Task" [ 892.282428] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.313774] env[61898]: DEBUG nova.network.neutron [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updating instance_info_cache with network_info: [{"id": "53551414-df45-4670-abea-be494090dd14", "address": "fa:16:3e:1f:b4:46", "network": {"id": "616f0775-546c-4124-b414-c2ce3228e7ec", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-245576291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a984459656494b738b60ec791c579316", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53551414-df", "ovs_interfaceid": "53551414-df45-4670-abea-be494090dd14", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.316054] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-740e37b5-5389-4b91-b8ec-dfb5ca5320fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.323822] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240915, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.327532] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24c4cfd-0102-4114-b527-a526440383c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.345096] env[61898]: DEBUG nova.compute.provider_tree [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.462542] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240912, 'name': ReconfigVM_Task, 'duration_secs': 0.679477} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.462979] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 11ca5129-0dc3-44b3-8f7b-215c93dac764/11ca5129-0dc3-44b3-8f7b-215c93dac764.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.463917] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08f4b315-de93-4498-8691-fb0611bfeb5d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.474430] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 892.474430] env[61898]: value = "task-1240916" [ 892.474430] env[61898]: _type = "Task" [ 892.474430] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.489044] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240916, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.544781] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 892.544781] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b61ab8-0c3f-f702-9e09-5610e1ec0f7f" [ 892.544781] env[61898]: _type = "HttpNfcLease" [ 892.544781] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 892.545149] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 892.545149] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b61ab8-0c3f-f702-9e09-5610e1ec0f7f" [ 892.545149] env[61898]: _type = "HttpNfcLease" [ 892.545149] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 892.545975] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b55df-763c-419d-b147-2524b291b928 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.558068] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk from lease info. 
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 892.558068] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 892.618340] env[61898]: DEBUG nova.compute.manager [req-9e9e0db8-5644-421a-8cc4-2896eeb4f22e req-1d3d29c7-09b4-4e19-8073-da2a36eb5093 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Received event network-vif-deleted-ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 892.618608] env[61898]: INFO nova.compute.manager [req-9e9e0db8-5644-421a-8cc4-2896eeb4f22e req-1d3d29c7-09b4-4e19-8073-da2a36eb5093 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Neutron deleted interface ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f; detaching it from the instance and deleting it from the info cache [ 892.618731] env[61898]: DEBUG nova.network.neutron [req-9e9e0db8-5644-421a-8cc4-2896eeb4f22e req-1d3d29c7-09b4-4e19-8073-da2a36eb5093 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.684725] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5eee5a12-5e90-41d1-8004-76a9b3607975 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.727852] env[61898]: DEBUG nova.network.neutron [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.795722] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240915, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.820380] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Releasing lock "refresh_cache-80931b22-a69b-41cd-b707-13bf11111b88" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.821130] env[61898]: DEBUG nova.objects.instance [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lazy-loading 'flavor' on Instance uuid 80931b22-a69b-41cd-b707-13bf11111b88 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 892.848219] env[61898]: DEBUG nova.scheduler.client.report [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 892.875778] env[61898]: DEBUG nova.compute.utils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 892.986421] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240916, 'name': Rename_Task, 'duration_secs': 0.217291} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.986768] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.986932] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-902564b2-e9f2-4d38-a459-4c3ae8dfdef4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.995404] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 892.995404] env[61898]: value = "task-1240917" [ 892.995404] env[61898]: _type = "Task" [ 892.995404] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.006944] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.123085] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be838956-4181-409b-92c8-84bba7e7130a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.135821] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c4b060-2c36-4c15-9ea5-70711a11354f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.177706] env[61898]: DEBUG nova.compute.manager [req-9e9e0db8-5644-421a-8cc4-2896eeb4f22e req-1d3d29c7-09b4-4e19-8073-da2a36eb5093 service nova] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Detach interface failed, port_id=ac8a171f-baa8-4ff1-b978-ec4c8bce7b1f, reason: Instance 9afa94d2-16a1-484f-96b4-8bbd93829ffe could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 893.231320] env[61898]: INFO nova.compute.manager [-] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Took 1.99 seconds to deallocate network for instance. [ 893.294649] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240915, 'name': ReconfigVM_Task, 'duration_secs': 0.540854} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.295145] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Reconfigured VM instance instance-00000057 to attach disk [datastore1] cf428138-4d0d-43bf-a654-06a62a82c9a1/cf428138-4d0d-43bf-a654-06a62a82c9a1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.295836] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77813484-bb4b-43d0-b71b-abeccdf982dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.299833] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.300164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.300433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.300674] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.301103] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.303331] env[61898]: INFO nova.compute.manager [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Terminating instance [ 893.307173] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d 
tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 893.307173] env[61898]: value = "task-1240918" [ 893.307173] env[61898]: _type = "Task" [ 893.307173] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.320992] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240918, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.327095] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a1093c-4eb5-45d0-aa25-965fe781cf52 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.354166] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.357170] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.357980] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.292s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.360450] env[61898]: INFO nova.compute.claims [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.363490] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac2c6b1c-a06c-4bdf-875e-09cde04e3903 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.374710] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 893.374710] env[61898]: value = "task-1240919" [ 893.374710] env[61898]: _type = "Task" [ 893.374710] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.379509] env[61898]: INFO nova.virt.block_device [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Booting with volume b04c905b-4035-4be9-9960-21b687a5e2a9 at /dev/sdb [ 893.384380] env[61898]: INFO nova.scheduler.client.report [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance 01685478-9d68-4edd-8dff-7d63fcd8bcd3 [ 893.389499] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "cdd5f647-2c43-4389-820d-2d39d7d20889" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.389895] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.390227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.390658] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.390868] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.392913] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240919, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.399138] env[61898]: INFO nova.compute.manager [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Terminating instance [ 893.440583] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5767ba51-5f69-4ada-a5d4-958e026c738d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.456566] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d997fd20-d909-40b4-9bc8-03a866cab11d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.503795] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a461e234-100d-437e-a3dc-b6b4f7dbb7f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.514604] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240917, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.520602] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616d3422-f7e9-46b5-b66c-e7cff4613935 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.574988] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4de089-5118-4389-a132-9bcf6d1f40e0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.585244] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2660dda9-839b-4662-8d4f-d33e26300e1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.612790] env[61898]: DEBUG nova.virt.block_device [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating existing volume attachment record: fab4d4a2-10dd-45df-9077-5e46841d7045 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 893.739741] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.810681] env[61898]: DEBUG nova.compute.manager [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 893.811230] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.812552] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a49f4f9-6b9e-4417-99fb-f400fe412e34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.827110] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240918, 'name': Rename_Task, 'duration_secs': 0.207907} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.830184] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.834620] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.834933] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0d895d9-5b53-47b8-9115-883514438b62 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.837787] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79612064-771f-4f3c-8371-376b8cd3c2d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.846235] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 893.846235] env[61898]: value = "task-1240922" [ 893.846235] env[61898]: _type = "Task" [ 893.846235] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.852950] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 893.852950] env[61898]: value = "task-1240923" [ 893.852950] env[61898]: _type = "Task" [ 893.852950] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.863612] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240922, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.870912] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.890015] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240919, 'name': PowerOffVM_Task, 'duration_secs': 0.269328} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.890485] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.898603] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfiguring VM instance instance-00000051 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.899868] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c14d8a6-074b-4669-a7ce-715a49b5c968 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.921064] env[61898]: DEBUG nova.compute.manager [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 893.921504] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 893.922542] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d22641d0-6c74-4b1a-a252-51214fd2400b tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "01685478-9d68-4edd-8dff-7d63fcd8bcd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 19.878s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.924754] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c103d4d8-aab7-494f-84b2-01026d58ce39 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.937945] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 893.941098] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ee7f3d2-92c2-4a7b-bfaf-b6aa27445bd6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.944044] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 893.944044] env[61898]: value = "task-1240925" [ 893.944044] env[61898]: _type = "Task" [ 893.944044] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.955979] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 893.958110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b40e15b-9502-4f9f-b90d-020b4b3a8ec0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.967031] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240925, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.969950] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 893.969950] env[61898]: value = "task-1240926" [ 893.969950] env[61898]: _type = "Task" [ 893.969950] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.976818] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 893.977089] env[61898]: ERROR oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk due to incomplete transfer. [ 893.977893] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b95415a4-ea59-4a2c-892c-ef6ac9da0e66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.984840] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.990431] env[61898]: DEBUG oslo_vmware.rw_handles [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528927f4-3be6-3366-efa2-58c1390d6652/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 893.990898] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Uploaded image 0924447f-f2a2-454b-abe8-8a01ccf8a8b1 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 893.993559] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 893.994410] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7e0e535d-aac7-4938-bb1c-7d89ef82ce0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.008760] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 894.008760] env[61898]: value = "task-1240927" [ 894.008760] env[61898]: _type = "Task" [ 894.008760] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.013290] env[61898]: DEBUG oslo_vmware.api [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1240917, 'name': PowerOnVM_Task, 'duration_secs': 0.71638} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.017378] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.017787] env[61898]: INFO nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Took 10.61 seconds to spawn the instance on the hypervisor. 
[ 894.018197] env[61898]: DEBUG nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 894.019411] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95c920a-2c85-4c8b-99f1-292b65e38d6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.033920] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240927, 'name': Destroy_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.364218] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240922, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.376771] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240923, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.459869] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240925, 'name': ReconfigVM_Task, 'duration_secs': 0.411357} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.463208] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Reconfigured VM instance instance-00000051 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 894.463879] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 894.465250] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb7ebfc5-4c1f-44a5-aeef-789559e3106b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.477411] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 894.477411] env[61898]: value = "task-1240928" [ 894.477411] env[61898]: _type = "Task" [ 894.477411] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.491756] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240926, 'name': PowerOffVM_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.499133] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240928, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.525082] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240927, 'name': Destroy_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.550437] env[61898]: INFO nova.compute.manager [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Took 24.17 seconds to build instance. [ 894.706420] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d780040-6770-4b04-ba9b-85db7a9a2571 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.716933] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac85c97-f149-415c-9b6b-f85db515fc0f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.751216] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5fcd2d-1cb7-4334-b18f-469184fbd012 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.760020] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6dd111-641d-474c-a7a3-768ade93e3a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.775184] env[61898]: DEBUG nova.compute.provider_tree [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.862429] env[61898]: DEBUG oslo_vmware.api [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240922, 'name': PowerOnVM_Task, 'duration_secs': 0.78938} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.864213] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.865180] env[61898]: INFO nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Took 9.01 seconds to spawn the instance on the hypervisor. [ 894.865271] env[61898]: DEBUG nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 894.869775] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b0fdef-cb81-4c14-8a43-ba56b5b66c1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.873019] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240923, 'name': PowerOffVM_Task, 'duration_secs': 0.634176} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.873283] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 894.873886] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.874103] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6eace572-0acb-46c6-8d53-f09965a689ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.965141] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 894.965255] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 894.965375] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleting the datastore file [datastore2] 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.967197] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-090877ad-8ed6-4bed-9d9e-c8b10702a90a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.978230] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 894.978230] env[61898]: value = "task-1240930" [ 894.978230] env[61898]: _type = "Task" [ 894.978230] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.982100] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240926, 'name': PowerOffVM_Task, 'duration_secs': 0.532934} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.993366] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 894.993703] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.994438] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b139027-157b-46de-ad0a-aba395b3c31d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.003588] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.008453] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240928, 'name': PowerOnVM_Task} progress is 79%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.025361] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240927, 'name': Destroy_Task, 'duration_secs': 0.536333} completed successfully. 
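Editor's note: each MultipleCreateTestJSON teardown above follows the same order: power the VM off, unregister it, then delete its directory from the datastore and wait on the DeleteDatastoreFile_Task. A condensed sketch of that ordering is below; the `session` object with `call` and `wait_for_task` helpers is hypothetical and stands in for the driver's VMware session, so this is not the vmwareapi driver's actual code.

# Condensed sketch of the teardown order logged above
# (power off -> unregister -> delete datastore directory).
def destroy_instance(session, vm_ref, datastore_path):
    session.wait_for_task(session.call('PowerOffVM_Task', vm_ref))
    session.call('UnregisterVM', vm_ref)                      # no task to wait on
    delete_task = session.call('DeleteDatastoreFile_Task', datastore_path)
    session.wait_for_task(delete_task)                        # "Deleted the datastore file"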
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.025721] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroyed the VM [ 895.026070] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 895.027405] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c2cc6c35-bfb1-4cbf-8f4c-60f5cc49d700 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.037639] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 895.037639] env[61898]: value = "task-1240932" [ 895.037639] env[61898]: _type = "Task" [ 895.037639] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.048926] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240932, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.052965] env[61898]: DEBUG oslo_concurrency.lockutils [None req-befd6157-d19d-408b-9539-e232638776c4 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.691s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.078851] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 895.079798] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 895.080153] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleting the datastore file [datastore1] cdd5f647-2c43-4389-820d-2d39d7d20889 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.080614] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ed26a50-5fcb-4cb1-8304-7d2cceeb894e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.090212] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for the task: (returnval){ [ 895.090212] env[61898]: value = "task-1240933" [ 895.090212] env[61898]: _type = "Task" [ 895.090212] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.100409] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.278831] env[61898]: DEBUG nova.scheduler.client.report [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 895.396101] env[61898]: INFO nova.compute.manager [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Took 24.60 seconds to build instance. [ 895.444880] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "cd1335b7-78b7-4cea-add7-dd69736067b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.444880] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.498547] env[61898]: DEBUG oslo_vmware.api [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240928, 'name': PowerOnVM_Task, 'duration_secs': 0.729344} completed successfully. 
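Editor's note: the inventory payload reported to Placement above makes the capacity arithmetic explicit: the schedulable amount of each resource class is total * allocation_ratio - reserved, with max_unit still capping any single allocation. A quick worked check against the logged values:

# Worked check of the effective capacity implied by the inventory above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 148},
}
for rc, inv in inventory.items():
    schedulable = inv['total'] * inv['allocation_ratio'] - inv['reserved']
    print(rc, schedulable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0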
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.501976] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 895.502322] env[61898]: DEBUG nova.compute.manager [None req-6a03f8bb-fa87-479b-bdb9-ba642e9456dc tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 895.502710] env[61898]: DEBUG oslo_vmware.api [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146764} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.503552] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087c9eb7-edd2-4058-96ed-c76b1c314008 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.506705] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.506956] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.507247] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.507494] env[61898]: INFO nova.compute.manager [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Took 1.70 seconds to destroy the instance on the hypervisor. [ 895.507817] env[61898]: DEBUG oslo.service.loopingcall [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.508105] env[61898]: DEBUG nova.compute.manager [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 895.508217] env[61898]: DEBUG nova.network.neutron [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.550704] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240932, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.610844] env[61898]: DEBUG oslo_vmware.api [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Task: {'id': task-1240933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170932} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.611192] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.611408] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.611605] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.611790] env[61898]: INFO nova.compute.manager [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Took 1.69 seconds to destroy the instance on the hypervisor. [ 895.612083] env[61898]: DEBUG oslo.service.loopingcall [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
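Editor's note: the "Waiting for function ... _deallocate_network_with_retries to return" entries show that network teardown is wrapped in an oslo.service looping call so transient Neutron failures are retried. A bare-bones retry loop conveying the same idea follows; the attempt budget and delay are invented for illustration, and Nova drives this through the looping-call machinery rather than a plain loop.

# Bare-bones retry loop mirroring "_deallocate_network_with_retries" above.
import time

def deallocate_network_with_retries(deallocate, instance_uuid,
                                    attempts=3, delay=2.0):
    last_exc = None
    for _ in range(attempts):
        try:
            deallocate(instance_uuid)   # e.g. neutron deallocate_for_instance()
            return
        except Exception as exc:        # hypothetical transient Neutron failure
            last_exc = exc
            time.sleep(delay)
    raise last_exc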
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.614465] env[61898]: DEBUG nova.compute.manager [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 895.614556] env[61898]: DEBUG nova.network.neutron [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.784635] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.785165] env[61898]: DEBUG nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 895.788596] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.626s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.788860] env[61898]: DEBUG nova.objects.instance [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=61898) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 895.900342] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0354abb5-86f1-4e65-a86d-aa666082de3d tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.123s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.947654] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 896.051735] env[61898]: DEBUG oslo_vmware.api [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240932, 'name': RemoveSnapshot_Task, 'duration_secs': 0.714255} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.052953] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 896.053513] env[61898]: INFO nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 16.08 seconds to snapshot the instance on the hypervisor. [ 896.186838] env[61898]: DEBUG nova.compute.manager [req-b4c28a58-a4a2-45eb-953f-052fc3590291 req-2b100c3e-a206-491b-8177-b8256c1431ed service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Received event network-vif-deleted-df9c8d59-f506-4a95-b90b-85b338619b4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 896.187336] env[61898]: INFO nova.compute.manager [req-b4c28a58-a4a2-45eb-953f-052fc3590291 req-2b100c3e-a206-491b-8177-b8256c1431ed service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Neutron deleted interface df9c8d59-f506-4a95-b90b-85b338619b4a; detaching it from the instance and deleting it from the info cache [ 896.187574] env[61898]: DEBUG nova.network.neutron [req-b4c28a58-a4a2-45eb-953f-052fc3590291 req-2b100c3e-a206-491b-8177-b8256c1431ed service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.246982] env[61898]: DEBUG nova.compute.manager [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Received event network-changed-0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 896.247224] env[61898]: DEBUG nova.compute.manager [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Refreshing instance network info cache due to event network-changed-0d791731-b395-4858-b0b0-86de8a660e18. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 896.247503] env[61898]: DEBUG oslo_concurrency.lockutils [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] Acquiring lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.247664] env[61898]: DEBUG oslo_concurrency.lockutils [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] Acquired lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.247873] env[61898]: DEBUG nova.network.neutron [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Refreshing network info cache for port 0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.295049] env[61898]: DEBUG nova.compute.utils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 896.299456] env[61898]: DEBUG nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 896.299759] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 896.372302] env[61898]: DEBUG nova.policy [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a210751a82c744bb901f3e876728a900', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '025bf0ed02e24a998d2a6f7cf7ae77b1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 896.391724] env[61898]: WARNING oslo_messaging._drivers.amqpdriver [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 896.474134] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.508222] env[61898]: DEBUG nova.network.neutron [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.548711] env[61898]: DEBUG nova.network.neutron [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.626312] env[61898]: DEBUG nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Found 3 images (rotation: 2) {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4883}} [ 896.626596] env[61898]: DEBUG nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Rotating out 1 backups {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4891}} [ 896.626705] env[61898]: DEBUG nova.compute.manager [None req-bf14b410-8867-4c3d-a5f6-dfd5d4263ff6 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleting image 7d0b0872-de3d-40f0-91fd-fb21768b8b13 {{(pid=61898) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4896}} [ 896.690766] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-392852ed-538a-4220-803d-6515b1c684a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.703232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f61485-692a-4dc4-88f5-4d7cd0775658 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.743224] env[61898]: DEBUG nova.compute.manager [req-b4c28a58-a4a2-45eb-953f-052fc3590291 req-2b100c3e-a206-491b-8177-b8256c1431ed service nova] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Detach interface failed, port_id=df9c8d59-f506-4a95-b90b-85b338619b4a, reason: Instance 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 896.800108] env[61898]: DEBUG nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Start building block device mappings for instance. 
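Editor's note: the "Found 3 images (rotation: 2) ... Rotating out 1 backups ... Deleting image" entries describe the backup-rotation rule: keep the newest `rotation` backup images for the instance and delete the rest, oldest first. A small sketch of that rule; sorting on a 'created_at' field and the delete_image callable are assumptions for the example.

# Sketch of the rotation rule logged above: rotation=2 with 3 matching backups
# means the single oldest image is deleted.
def rotate_backups(images, rotation, delete_image):
    ordered = sorted(images, key=lambda img: img['created_at'], reverse=True)
    for image in ordered[rotation:]:        # everything beyond the newest N
        delete_image(image['id'])           # e.g. "Deleting image 7d0b0872-..."

rotate_backups(
    [{'id': 'a', 'created_at': 3}, {'id': 'b', 'created_at': 2}, {'id': 'c', 'created_at': 1}],
    rotation=2,
    delete_image=print,                     # prints 'c', the oldest backup
)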
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 896.803934] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e46d6411-cb03-44ce-969b-91830aeda6a6 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.804883] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.647s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.805132] env[61898]: DEBUG nova.objects.instance [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lazy-loading 'resources' on Instance uuid e5c38d18-18e4-47dc-8445-71d3dc0c325a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.895402] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "80931b22-a69b-41cd-b707-13bf11111b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.895720] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.895966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "80931b22-a69b-41cd-b707-13bf11111b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.896215] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.896424] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.899031] env[61898]: INFO nova.compute.manager [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Terminating instance [ 897.005635] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Successfully created port: f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.012268] env[61898]: INFO nova.compute.manager [-] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Took 1.50 seconds to deallocate network for instance. [ 897.028904] env[61898]: DEBUG nova.compute.manager [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 897.030143] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f058b057-04f1-46e8-bdc6-a7565e00ae4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.052099] env[61898]: INFO nova.compute.manager [-] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Took 1.44 seconds to deallocate network for instance. [ 897.202576] env[61898]: DEBUG nova.network.neutron [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Updated VIF entry in instance network info cache for port 0d791731-b395-4858-b0b0-86de8a660e18. 
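Editor's note: the terminate path above first serializes on the per-instance lock ("80931b22-...") and then briefly takes the separate "<uuid>-events" lock to clear pending external events before destroying the VM. The sketch below mimics that nesting with plain threading locks; Nova actually uses oslo_concurrency.lockutils, so this is only an illustration of the locking order.

# Lock nesting seen in the terminate path above: instance lock around the whole
# operation, a short-lived "<uuid>-events" lock to clear pending events.
import threading
from collections import defaultdict

_locks = defaultdict(threading.Lock)

def terminate_instance(uuid, do_terminate, clear_events):
    with _locks[uuid]:                      # "Lock '<uuid>' acquired by ... do_terminate_instance"
        with _locks[uuid + '-events']:      # "Lock '<uuid>-events' acquired ... _clear_events"
            clear_events(uuid)
        do_terminate(uuid)                  # power off, unregister, delete files, deallocate network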
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.202977] env[61898]: DEBUG nova.network.neutron [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Updating instance_info_cache with network_info: [{"id": "0d791731-b395-4858-b0b0-86de8a660e18", "address": "fa:16:3e:7b:27:63", "network": {"id": "24787caa-0ab1-4d06-b31c-d9910fc68de3", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1660262411-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ed1f0593fdb4221b84147f56049153e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ce17e10e-2fb0-4191-afee-e2b89fa15074", "external-id": "nsx-vlan-transportzone-352", "segmentation_id": 352, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d791731-b3", "ovs_interfaceid": "0d791731-b395-4858-b0b0-86de8a660e18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.403191] env[61898]: DEBUG nova.compute.manager [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Start destroying the instance on the hypervisor. 
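Editor's note: the refreshed instance_info_cache above carries the full VIF model for port 0d791731-b3...: MAC, network, subnets with fixed and floating IPs, MTU and the OVS binding details. The short sketch below pulls the addresses out of a structure shaped like that cache dump; the dict layout it assumes is exactly the one printed above.

# Collects fixed and floating addresses from a network_info list shaped like
# the cache entry above (VIF -> network -> subnets -> ips -> floating_ips).
def collect_addresses(network_info):
    fixed, floating = [], []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                fixed.append(ip['address'])                        # e.g. 192.168.128.10
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return fixed, floating                                         # (['192.168.128.10'], ['10.180.180.238'])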
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 897.403484] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.404662] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a839aa5b-8947-4a5f-be81-676a07f8a71f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.416760] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.417118] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf1541e9-4825-49db-b466-9f6e7cab8e3f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.427043] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 897.427043] env[61898]: value = "task-1240935" [ 897.427043] env[61898]: _type = "Task" [ 897.427043] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.444606] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240935, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.519383] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.542856] env[61898]: INFO nova.compute.manager [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] instance snapshotting [ 897.548104] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b55210-50c8-4cf3-8863-0f627729623e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.570066] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.573839] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e302110-737e-45bc-9fa6-9a5dfcd8086e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.696910] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c137b650-42f4-42b1-a3e1-de2f6d9795d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.706956] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec7eb43-e87a-4ea9-a0d6-c63c1d6ac990 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.710902] env[61898]: DEBUG oslo_concurrency.lockutils [req-7cae4bae-36ad-4d77-9dc9-ff2c8234dec7 req-c5942e23-cfed-4018-bf98-a4dbb94f13a2 service nova] Releasing lock "refresh_cache-11ca5129-0dc3-44b3-8f7b-215c93dac764" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.742656] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846dc160-561c-489e-8ad9-b0c594f5a867 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.753904] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1993c62e-8699-482c-bda1-14fb999a971d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.769415] env[61898]: DEBUG nova.compute.provider_tree [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.814458] env[61898]: DEBUG 
nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 897.838363] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 897.839360] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 897.839608] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.839871] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 897.840096] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.840335] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 897.840618] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 897.840844] 
env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 897.841102] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 897.841357] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 897.841605] env[61898]: DEBUG nova.virt.hardware [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 897.842527] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0c1c7b-89c9-4de1-863d-f5297b0ccd45 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.852039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15349693-8adb-444c-9fd4-aea8be59585b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.937383] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240935, 'name': PowerOffVM_Task, 'duration_secs': 0.30889} completed successfully. 
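Editor's note: the nova.virt.hardware entries above walk the CPU topology selection for the 1-vCPU m1.nano flavor with no flavor or image limits: every (sockets, cores, threads) factorization of the vCPU count is generated, filtered against the 65536 defaults, and sorted by preference, which for a single vCPU leaves only 1:1:1. The simplified enumeration below conveys the same idea; it deliberately ignores preference ordering and NUMA handling.

# Simplified version of the enumeration logged above: list every
# (sockets, cores, threads) factorization of the vCPU count within the limits.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"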
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.937717] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.937893] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.938181] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50621b7f-e496-46e4-b081-dd1d6ffd86b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.015367] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 898.015621] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 898.015809] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleting the datastore file [datastore1] 80931b22-a69b-41cd-b707-13bf11111b88 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.016111] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3307e092-9550-42f5-8989-a783657d984c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.024403] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 898.024403] env[61898]: value = "task-1240937" [ 898.024403] env[61898]: _type = "Task" [ 898.024403] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.032991] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240937, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.086463] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 898.086811] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2b9d8df2-e846-4984-a2c4-e1fb16575660 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.097641] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 898.097641] env[61898]: value = "task-1240938" [ 898.097641] env[61898]: _type = "Task" [ 898.097641] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.106440] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240938, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.213467] env[61898]: DEBUG nova.compute.manager [req-9b5e02fc-cb6d-42d4-812f-723ed60c2ec6 req-3a7c6ad4-1a8e-4f55-9b5a-2e67eee66690 service nova] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Received event network-vif-deleted-d3fda812-e082-4563-9ca3-516f9e0b6e27 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 898.273521] env[61898]: DEBUG nova.scheduler.client.report [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 898.535822] env[61898]: DEBUG oslo_vmware.api [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240937, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219988} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.536227] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 898.536425] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 898.536598] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.537146] env[61898]: INFO nova.compute.manager [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Took 1.13 seconds to destroy the instance on the hypervisor. [ 898.537604] env[61898]: DEBUG oslo.service.loopingcall [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.537604] env[61898]: DEBUG nova.compute.manager [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 898.537721] env[61898]: DEBUG nova.network.neutron [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.610630] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240938, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.779528] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.974s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.781948] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.969s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.812719] env[61898]: INFO nova.scheduler.client.report [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted allocations for instance e5c38d18-18e4-47dc-8445-71d3dc0c325a [ 899.108327] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240938, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.232388] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.294835] env[61898]: INFO nova.compute.claims [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.321334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8e35c18d-605f-45c7-9d58-68f7a33eeeaf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "e5c38d18-18e4-47dc-8445-71d3dc0c325a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.113s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.326588] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Successfully updated port: f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.611565] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240938, 'name': CreateSnapshot_Task, 'duration_secs': 1.47297} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.611802] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 899.612584] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956fbe37-e285-419f-89a5-73f986808206 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.803218] env[61898]: INFO nova.compute.resource_tracker [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating resource usage from migration 2e52a2f3-ee15-4f80-b956-7e2560a4a289 [ 899.828562] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.828721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.828918] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.906600] env[61898]: DEBUG nova.network.neutron [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.050411] env[61898]: DEBUG oslo_concurrency.lockutils [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.050732] env[61898]: DEBUG oslo_concurrency.lockutils [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.050925] env[61898]: DEBUG nova.compute.manager [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 900.054206] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50070310-d899-4901-8069-823330ca5656 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.062086] env[61898]: DEBUG nova.compute.manager [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 900.062744] env[61898]: DEBUG nova.objects.instance [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 900.068286] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88266f6f-431b-430d-bb8a-0de94ef2b043 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.075370] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98349622-c5b2-4dc1-abab-54dbe00774df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.079403] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "86367a82-239b-4f6e-b306-d9661eadf95e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.079636] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.079838] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.080037] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.080216] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.082461] env[61898]: INFO nova.compute.manager [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Terminating instance [ 900.111639] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c061e82c-ce81-4511-bab4-64e8977f3ef5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.119862] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700d9f30-55b2-435a-b08a-44fab5e59b85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.130678] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 900.140120] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-46661a0c-568a-4020-a936-cca153524e81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.143298] env[61898]: DEBUG nova.compute.provider_tree [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.149429] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 900.149429] env[61898]: value = "task-1240939" [ 900.149429] env[61898]: _type = "Task" [ 900.149429] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.157828] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240939, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.378629] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.409266] env[61898]: INFO nova.compute.manager [-] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Took 1.87 seconds to deallocate network for instance. [ 900.540716] env[61898]: DEBUG nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-vif-plugged-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 900.540940] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Acquiring lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.541174] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.541351] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.541521] env[61898]: DEBUG nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] No waiting events found dispatching network-vif-plugged-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 900.541710] env[61898]: WARNING nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received unexpected event network-vif-plugged-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f for instance with vm_state building and task_state spawning. [ 900.541892] env[61898]: DEBUG nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 900.542042] env[61898]: DEBUG nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing instance network info cache due to event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 900.542219] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.569818] env[61898]: DEBUG nova.network.neutron [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.585774] env[61898]: DEBUG nova.compute.manager [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 900.586054] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 900.587426] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f8ec76-3359-4071-80dc-0c0b853c49ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.604819] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.605110] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ef5426a-c140-497c-8af0-e9f6eb2c1da4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.611502] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 900.611502] env[61898]: value = "task-1240940" [ 900.611502] env[61898]: _type = "Task" [ 900.611502] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.619342] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.647030] env[61898]: DEBUG nova.scheduler.client.report [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 900.660110] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240939, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.917447] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.072444] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.072960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.073425] env[61898]: DEBUG nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Instance network_info: |[{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 901.073752] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d8cda06-a423-420c-9524-58b22fb781ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.075800] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.075998] env[61898]: DEBUG nova.network.neutron [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: 
b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.077214] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:a9:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.085574] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Creating folder: Project (025bf0ed02e24a998d2a6f7cf7ae77b1). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.088939] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd18d973-a749-419e-8b14-8195eab3bd7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.097251] env[61898]: DEBUG oslo_vmware.api [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 901.097251] env[61898]: value = "task-1240941" [ 901.097251] env[61898]: _type = "Task" [ 901.097251] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.102550] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Created folder: Project (025bf0ed02e24a998d2a6f7cf7ae77b1) in parent group-v267550. [ 901.102812] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Creating folder: Instances. Parent ref: group-v267686. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.103445] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3a36316-6095-4e59-91ed-f31334a28cee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.108642] env[61898]: DEBUG oslo_vmware.api [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240941, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.117233] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Created folder: Instances in parent group-v267686. 
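
The folder records just above (Folder.CreateFolder invoked first for "Project (025bf0ed02e24a998d2a6f7cf7ae77b1)" and then for "Instances") show the driver laying out a per-project folder hierarchy under its parent group before the VM is registered. A minimal sketch of that step, assuming an already established oslo.vmware VMwareAPISession and using its public invoke_api() rather than Nova's internal wrappers; the DuplicateName handling and the folder lookup are simplified placeholders, not the driver's actual helper:

    from oslo_vmware import exceptions as vexc

    def create_child_folder(session, parent_ref, name):
        # Folder.CreateFolder returns the new folder moref directly; it is
        # not a task, which is why no wait_for_task/progress lines follow
        # the CreateFolder invocations in the records above.
        try:
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # A concurrent request already created the folder; a real
            # helper would look it up by name here instead of returning
            # None.
            return None

    # Hypothetical usage mirroring the records above:
    # project_ref = create_child_folder(
    #     session, parent_group_ref,
    #     'Project (025bf0ed02e24a998d2a6f7cf7ae77b1)')
    # instances_ref = create_child_folder(session, project_ref, 'Instances')
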
[ 901.117562] env[61898]: DEBUG oslo.service.loopingcall [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.118106] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.118368] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce92114b-6bf0-46bc-8152-542f69d4d731 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.136895] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240940, 'name': PowerOffVM_Task, 'duration_secs': 0.367169} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.139707] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.139887] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.140202] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bb7e593-043c-4f88-be25-76e778e5007f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.144430] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.144430] env[61898]: value = "task-1240944" [ 901.144430] env[61898]: _type = "Task" [ 901.144430] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.158314] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.376s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.158561] env[61898]: INFO nova.compute.manager [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Migrating [ 901.158748] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.159010] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "compute-rpcapi-router" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.160296] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240944, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.161329] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.299s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.162501] env[61898]: INFO nova.compute.claims [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.167453] env[61898]: INFO nova.compute.rpcapi [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 901.167453] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "compute-rpcapi-router" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.179038] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240939, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.218916] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.219313] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.219604] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore1] 86367a82-239b-4f6e-b306-d9661eadf95e {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.220656] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d79076b7-d6c8-4e46-b691-b805e0379fca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.229463] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 901.229463] env[61898]: value = "task-1240946" [ 901.229463] env[61898]: _type = "Task" [ 901.229463] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.240654] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.341210] env[61898]: DEBUG nova.network.neutron [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updated VIF entry in instance network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 901.341674] env[61898]: DEBUG nova.network.neutron [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.610479] env[61898]: DEBUG oslo_vmware.api [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1240941, 'name': PowerOffVM_Task, 'duration_secs': 0.290841} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.611045] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.611440] env[61898]: DEBUG nova.compute.manager [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 901.612598] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9940af1-cd20-4e65-8cef-025d97f23d73 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.654319] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240944, 'name': CreateVM_Task, 'duration_secs': 0.411791} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.658027] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 901.659529] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.659700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.660309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 901.661331] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c32047b5-ea77-436c-a1a1-38bfc5db60b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.666685] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240939, 'name': CloneVM_Task, 'duration_secs': 1.371142} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.667404] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Created linked-clone VM from snapshot [ 901.668232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3e5832-1d63-43f0-92e0-edec442c1990 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.673379] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 901.673379] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528b0f57-e9e1-d339-18f6-25a92d4266e6" [ 901.673379] env[61898]: _type = "Task" [ 901.673379] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.684947] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Uploading image 7d207fb3-39a4-452d-a133-40f06b6cc713 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 901.692710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.692916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.693258] env[61898]: DEBUG nova.network.neutron [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.701247] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528b0f57-e9e1-d339-18f6-25a92d4266e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008968} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.701634] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.701895] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.702181] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.702382] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.702537] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.702791] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a8018d0-7cae-4571-b8d7-da85a0b4f2b2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.712881] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.713107] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.715742] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 901.715742] env[61898]: value = "vm-267685" [ 901.715742] env[61898]: _type = "VirtualMachine" [ 901.715742] env[61898]: }. 
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 901.715949] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fd37d6f-82bc-4069-9ee4-b86c903744f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.718601] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e9ea327c-e902-4709-a026-0b60cdf1e104 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.725334] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 901.725334] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cb4d73-9a16-502b-3f4f-402575cf3c1e" [ 901.725334] env[61898]: _type = "Task" [ 901.725334] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.730522] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease: (returnval){ [ 901.730522] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528e587b-09fa-d39c-160b-2b6d7cd1ff37" [ 901.730522] env[61898]: _type = "HttpNfcLease" [ 901.730522] env[61898]: } obtained for exporting VM: (result){ [ 901.730522] env[61898]: value = "vm-267685" [ 901.730522] env[61898]: _type = "VirtualMachine" [ 901.730522] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 901.730857] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the lease: (returnval){ [ 901.730857] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528e587b-09fa-d39c-160b-2b6d7cd1ff37" [ 901.730857] env[61898]: _type = "HttpNfcLease" [ 901.730857] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 901.744177] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cb4d73-9a16-502b-3f4f-402575cf3c1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.753839] env[61898]: DEBUG oslo_vmware.api [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151742} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.755171] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 901.755171] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528e587b-09fa-d39c-160b-2b6d7cd1ff37" [ 901.755171] env[61898]: _type = "HttpNfcLease" [ 901.755171] env[61898]: } is ready. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 901.755171] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.755171] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.755171] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.755171] env[61898]: INFO nova.compute.manager [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Took 1.17 seconds to destroy the instance on the hypervisor. [ 901.755171] env[61898]: DEBUG oslo.service.loopingcall [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.755633] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 901.755633] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528e587b-09fa-d39c-160b-2b6d7cd1ff37" [ 901.755633] env[61898]: _type = "HttpNfcLease" [ 901.755633] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 901.755811] env[61898]: DEBUG nova.compute.manager [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 901.755811] env[61898]: DEBUG nova.network.neutron [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.757812] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a389c8-9942-42f2-9f6e-dc2195f1507e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.767956] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk from lease info. 
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 901.768167] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 901.844339] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.844612] env[61898]: DEBUG nova.compute.manager [req-c6234edc-6539-4431-b026-22c814c3b225 req-1ce02ab0-6bc3-4ae2-b09e-9231f641b2b9 service nova] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Received event network-vif-deleted-53551414-df45-4670-abea-be494090dd14 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 901.970967] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-90dd337e-1d02-467e-9445-0de365546a06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.079065] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c978d9b-8154-4294-bbc6-e1de2b9a1b1d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.087097] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d7935c-80bd-414d-8cee-cd93bb96cbaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.117328] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c499b46-136d-42ce-ae6d-36eeaecaefe0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.126983] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa2eebd-861f-464c-a788-075e99ce6083 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.132181] env[61898]: DEBUG oslo_concurrency.lockutils [None req-729d6e71-e05b-4d41-af5a-90ed148b4471 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.081s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.144430] env[61898]: DEBUG nova.compute.provider_tree [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.238490] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf 
tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cb4d73-9a16-502b-3f4f-402575cf3c1e, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.240028] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d06af38-4b70-4bdb-b376-8d3a3fe1d852 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.246183] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 902.246183] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c6a8d9-34f4-80c8-5adf-e3f1a4ce19cc" [ 902.246183] env[61898]: _type = "Task" [ 902.246183] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.254186] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c6a8d9-34f4-80c8-5adf-e3f1a4ce19cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.562576] env[61898]: DEBUG nova.network.neutron [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [{"id": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "address": "fa:16:3e:08:d8:91", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3e4cf3-8b", "ovs_interfaceid": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.584797] env[61898]: DEBUG nova.compute.manager [req-166e6916-edd6-4aa7-9569-28177dee2011 req-960bc5a5-4bfe-4024-8fd4-7253cffaed88 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Received event network-vif-deleted-fe9b11a7-dec5-4707-bb53-ea517e5a1b55 {{(pid=61898) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11460}} [ 902.584797] env[61898]: INFO nova.compute.manager [req-166e6916-edd6-4aa7-9569-28177dee2011 req-960bc5a5-4bfe-4024-8fd4-7253cffaed88 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Neutron deleted interface fe9b11a7-dec5-4707-bb53-ea517e5a1b55; detaching it from the instance and deleting it from the info cache [ 902.584797] env[61898]: DEBUG nova.network.neutron [req-166e6916-edd6-4aa7-9569-28177dee2011 req-960bc5a5-4bfe-4024-8fd4-7253cffaed88 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.646686] env[61898]: DEBUG nova.scheduler.client.report [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 902.758271] env[61898]: DEBUG nova.network.neutron [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.759780] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c6a8d9-34f4-80c8-5adf-e3f1a4ce19cc, 'name': SearchDatastore_Task, 'duration_secs': 0.010157} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.760379] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.760379] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 902.761563] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-708bba25-127a-4d02-9d55-9aa23b856aa6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.772151] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 902.772151] env[61898]: value = "task-1240948" [ 902.772151] env[61898]: _type = "Task" [ 902.772151] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.781694] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240948, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.020209] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 903.021054] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1aad593-fdf1-4e55-9626-650064a485f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.029727] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk is in state: ready. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 903.029984] env[61898]: ERROR oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk due to incomplete transfer. [ 903.030287] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9735eba9-4a49-4b59-819e-7863e39e61a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.039482] env[61898]: DEBUG oslo_vmware.rw_handles [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e875ed-bb57-cb3d-b7f3-791537dbccab/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 903.040188] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Uploaded image 038581bd-8ae3-45c6-8697-83c7fb01abff to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 903.042199] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 903.042594] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-848dfd80-b760-4392-9b57-44ffba72618f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.050110] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 903.050110] env[61898]: value = "task-1240949" [ 903.050110] env[61898]: _type = "Task" [ 903.050110] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.060311] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240949, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.066179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.087511] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cfc13481-512a-482f-837a-d64b16a8fc90 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.099972] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56835838-fe68-461c-aefc-1a807a97dc4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.138111] env[61898]: DEBUG nova.compute.manager [req-166e6916-edd6-4aa7-9569-28177dee2011 req-960bc5a5-4bfe-4024-8fd4-7253cffaed88 service nova] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Detach interface failed, port_id=fe9b11a7-dec5-4707-bb53-ea517e5a1b55, reason: Instance 86367a82-239b-4f6e-b306-d9661eadf95e could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 903.152947] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.992s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.153683] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 903.157186] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.418s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.157539] env[61898]: DEBUG nova.objects.instance [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'resources' on Instance uuid 9afa94d2-16a1-484f-96b4-8bbd93829ffe {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 903.262033] env[61898]: INFO nova.compute.manager [-] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Took 1.51 seconds to deallocate network for instance. [ 903.280429] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240948, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504011} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.280704] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.280930] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 903.281206] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-786fe461-6490-4e43-b615-e5bbfe56a855 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.287586] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 903.287586] env[61898]: value = "task-1240950" [ 903.287586] env[61898]: _type = "Task" [ 903.287586] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.296780] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.560625] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240949, 'name': Destroy_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.583069] env[61898]: DEBUG nova.compute.manager [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Stashing vm_state: stopped {{(pid=61898) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 903.661065] env[61898]: DEBUG nova.compute.utils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 903.665409] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 903.665644] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 903.704817] env[61898]: DEBUG nova.policy [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 903.769451] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.799339] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240950, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081744} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.799622] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.800517] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d0f127-f5eb-4b00-a504-6c033f781072 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.824907] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.827534] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f328904c-d00c-4bfb-ae18-880df8bf5eb3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.847647] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 903.847647] env[61898]: value = "task-1240951" [ 903.847647] env[61898]: _type = "Task" [ 903.847647] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.857753] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240951, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.977298] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5073a86a-b9e2-4c47-a968-7dfda1e8359d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.985410] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f2537e-db80-4759-a5ec-d727e43704a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.021238] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7db728-ff54-46e2-a85c-9513f65f5670 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.029117] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb886fc3-5ec4-477e-be9e-8128aab30604 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.053078] env[61898]: DEBUG nova.compute.provider_tree [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.062864] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240949, 'name': Destroy_Task, 'duration_secs': 0.630214} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.063325] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Destroyed the VM [ 904.063774] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 904.064753] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-92699890-8179-46d1-9dc1-20710bdcb5ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.071940] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 904.071940] env[61898]: value = "task-1240952" [ 904.071940] env[61898]: _type = "Task" [ 904.071940] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.083385] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240952, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.102899] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 904.142328] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Successfully created port: 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 904.166661] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 904.358818] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240951, 'name': ReconfigVM_Task, 'duration_secs': 0.27715} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.359207] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Reconfigured VM instance instance-00000058 to attach disk [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.360068] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3314bf0-fa09-4c5a-929f-9ad99a9ac9ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.368192] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 904.368192] env[61898]: value = "task-1240953" [ 904.368192] env[61898]: _type = "Task" [ 904.368192] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.378517] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240953, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.559291] env[61898]: DEBUG nova.scheduler.client.report [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 904.583938] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240952, 'name': RemoveSnapshot_Task, 'duration_secs': 0.494694} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.586458] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 904.586740] env[61898]: DEBUG nova.compute.manager [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 904.588366] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71dc171a-2ed3-4aae-ae45-6ae2eac573b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.591501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f113be-b5d4-434c-a0a7-0bd9a9b26d6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.616411] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 0 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.878505] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240953, 'name': 
Rename_Task, 'duration_secs': 0.147263} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.878877] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.879315] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94a79a63-20c1-4876-b967-e08ad4e29d01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.886806] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 904.886806] env[61898]: value = "task-1240954" [ 904.886806] env[61898]: _type = "Task" [ 904.886806] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.895015] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.065038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.067786] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.594s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.069395] env[61898]: INFO nova.compute.claims [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.092554] env[61898]: INFO nova.scheduler.client.report [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance 9afa94d2-16a1-484f-96b4-8bbd93829ffe [ 905.123871] env[61898]: INFO nova.compute.manager [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Shelve offloading [ 905.126477] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] 
[instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.127823] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a6469d6-4ecf-4244-a5b2-f7235af1a960 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.136024] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 905.136024] env[61898]: value = "task-1240955" [ 905.136024] env[61898]: _type = "Task" [ 905.136024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.146033] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.176080] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 905.198458] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.198757] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.198900] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.199173] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 
{{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.199383] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.199572] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.199837] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.200083] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.200319] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.200522] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.200758] env[61898]: DEBUG nova.virt.hardware [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.201823] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8805d5c1-3ac7-4e33-b9e5-b7efc521fd18 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.210304] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb9481f-2119-4c2f-ac49-3b12eeb8cf0b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.397701] env[61898]: DEBUG oslo_vmware.api [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240954, 'name': PowerOnVM_Task, 'duration_secs': 0.446139} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.398098] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.398646] env[61898]: INFO nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Took 7.58 seconds to spawn the instance on the hypervisor. [ 905.398838] env[61898]: DEBUG nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 905.399592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a109628-91c9-4433-9315-f9bd876be849 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.602326] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddb6aebc-c282-4385-8d63-baaf3ba50987 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "9afa94d2-16a1-484f-96b4-8bbd93829ffe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.524s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.631620] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.631828] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93efbbdd-d54b-4a04-bab9-638ce2e619c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.643354] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 905.643354] env[61898]: value = "task-1240956" [ 905.643354] env[61898]: _type = "Task" [ 905.643354] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.649246] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240955, 'name': PowerOffVM_Task, 'duration_secs': 0.242998} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.650556] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.650556] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 17 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 905.657165] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 905.657418] env[61898]: DEBUG nova.compute.manager [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 905.658235] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5178711a-1793-4609-b325-d347cf1899ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.664307] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.664512] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.664645] env[61898]: DEBUG nova.network.neutron [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.807658] env[61898]: DEBUG nova.compute.manager [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-vif-plugged-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 905.807963] env[61898]: DEBUG oslo_concurrency.lockutils [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] Acquiring lock 
"bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.808146] env[61898]: DEBUG oslo_concurrency.lockutils [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.808408] env[61898]: DEBUG oslo_concurrency.lockutils [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.808591] env[61898]: DEBUG nova.compute.manager [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] No waiting events found dispatching network-vif-plugged-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.808762] env[61898]: WARNING nova.compute.manager [req-5845002a-3d32-4519-aaa6-101c82e05731 req-70b28182-8c51-4060-a8fc-9fd06db39138 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received unexpected event network-vif-plugged-62f1251d-f84b-4c28-ab74-971fef0d640f for instance with vm_state building and task_state spawning. [ 905.917825] env[61898]: INFO nova.compute.manager [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Took 24.87 seconds to build instance. 
[ 906.001145] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Successfully updated port: 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.162841] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.163254] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.163317] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.163455] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.163604] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.163759] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.163966] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.164150] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.164324] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.164494] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.164677] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.173432] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f23f84e1-8aaf-428a-a430-8c1f1e5d5466 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.192512] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 906.192512] env[61898]: value = "task-1240957" [ 906.192512] env[61898]: _type = "Task" [ 906.192512] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.203285] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240957, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.364601] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3d3fd9-2632-4cd7-9c13-1dd477cf7169 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.373220] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7ab9a7-5708-417a-b495-e561e3523d05 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.410442] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6a422f-d3f4-4c2e-96d1-6c603885a684 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.419821] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4314ca7-92b5-4b96-b842-3b1eafe02cb7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.425118] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf951ae5-3153-4d99-adff-877342652fbf tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.388s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.437105] env[61898]: DEBUG nova.compute.provider_tree [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.504920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.505097] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.505238] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.618535] env[61898]: DEBUG nova.network.neutron [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": 
"fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.703383] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240957, 'name': ReconfigVM_Task, 'duration_secs': 0.313189} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.703756] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 33 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 906.942128] env[61898]: DEBUG nova.scheduler.client.report [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 907.055206] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.121587] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.215484] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 
tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.216304] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.216673] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.217025] env[61898]: DEBUG nova.virt.hardware [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.223939] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfiguring VM instance instance-00000052 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 907.224452] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4ebe2a7-cec6-4525-8542-83e7f0c633c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.250098] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 907.250098] env[61898]: value = "task-1240958" [ 907.250098] env[61898]: _type = "Task" [ 907.250098] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.257967] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240958, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.291406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.291406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.319721] env[61898]: INFO nova.compute.manager [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Rescuing [ 907.319721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.319721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.319721] env[61898]: DEBUG nova.network.neutron [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.446174] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.447245] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 907.452212] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.932s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.452212] env[61898]: DEBUG nova.objects.instance [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lazy-loading 'resources' on Instance uuid 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.488483] env[61898]: DEBUG nova.network.neutron [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.555174] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.555462] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.555677] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] 
Acquiring lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.555929] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.556045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.558415] env[61898]: INFO nova.compute.manager [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Terminating instance [ 907.760019] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240958, 'name': ReconfigVM_Task, 'duration_secs': 0.202851} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.760435] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfigured VM instance instance-00000052 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 907.761402] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf26cd2-1d6e-41ff-8562-9280c762853a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.799945] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.800345] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 907.807021] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8491bb1-eeb5-402f-97d4-a46d3bf27dfb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.824965] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 907.824965] env[61898]: value = "task-1240959" [ 907.824965] env[61898]: _type = "Task" [ 907.824965] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.835324] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240959, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.947226] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 907.947755] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e890afcc-fe40-4825-a705-a7c717bc99e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.953221] env[61898]: DEBUG nova.compute.utils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.954134] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 907.954342] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.964701] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.967559] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24eb726c-b579-46a5-9421-fa66228fa872 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.991612] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.991952] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Instance network_info: |[{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 907.992508] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:01:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62f1251d-f84b-4c28-ab74-971fef0d640f', 'vif_model': 'vmxnet3'}] 
{{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 908.004620] env[61898]: DEBUG oslo.service.loopingcall [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 908.004620] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 908.004620] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1d546c3-cddb-4326-9fcf-8340dc3c3453 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.027025] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 908.027025] env[61898]: value = "task-1240961" [ 908.027025] env[61898]: _type = "Task" [ 908.027025] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.038036] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240961, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.049024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.049024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.049024] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleting the datastore file [datastore2] 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.049024] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba69b858-4858-4bcd-8d41-c0204ab3b54b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.061453] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 908.061453] env[61898]: value = "task-1240962" [ 908.061453] env[61898]: _type = "Task" [ 908.061453] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.062179] env[61898]: DEBUG nova.compute.manager [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 908.062379] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.068564] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14320e3-1f29-4376-ae95-e2a2bd92940e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.077347] env[61898]: DEBUG nova.compute.manager [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 908.077347] env[61898]: DEBUG nova.compute.manager [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 908.077347] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.077347] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.077347] env[61898]: DEBUG nova.network.neutron [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.083495] env[61898]: DEBUG nova.policy [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize 
/opt/stack/nova/nova/policy.py:201}} [ 908.090661] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.091025] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.093688] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02b2c930-ad1c-474b-82a2-6ea9a28bab53 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.101424] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 908.101424] env[61898]: value = "task-1240963" [ 908.101424] env[61898]: _type = "Task" [ 908.101424] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.114456] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.327309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.337111] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240959, 'name': ReconfigVM_Task, 'duration_secs': 0.397792} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.337566] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab/5323b250-fad8-4d71-81ed-c5e5eeb8aeab.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.337938] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 50 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.363325] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fcb47d-ed3b-48de-8ae8-fa62fb5f1400 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.369809] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6829bf41-2f31-46a2-bd47-87068e091255 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.409335] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84da9fc8-6786-4946-afe4-109fa6f1c90d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.422229] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28ff2e0-407c-4f8f-a4c6-58b3cf30f8b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.438710] env[61898]: DEBUG nova.compute.provider_tree [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.461341] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 908.536962] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240961, 'name': CreateVM_Task, 'duration_secs': 0.468995} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.538772] env[61898]: DEBUG nova.network.neutron [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.539941] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.541205] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.541378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.541716] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 908.541987] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7b1d43c-2068-41eb-9a7b-2ac94d6a7e66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.546755] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 
tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 908.546755] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529400f6-d1f5-e1cf-e488-beafe76bfa83" [ 908.546755] env[61898]: _type = "Task" [ 908.546755] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.556495] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529400f6-d1f5-e1cf-e488-beafe76bfa83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.570424] env[61898]: DEBUG oslo_vmware.api [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1240962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169415} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.570706] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 908.570895] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 908.571081] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 908.595941] env[61898]: INFO nova.scheduler.client.report [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted allocations for instance 45b8dc91-b577-4548-bf3a-32c7c936c616 [ 908.613953] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240963, 'name': PowerOffVM_Task, 'duration_secs': 0.30781} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.613953] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.614397] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.614397] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fc1bc0b-95a9-4937-a294-1081e6fcce42 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.679135] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.679454] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.679689] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleting the datastore file [datastore2] 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.679995] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4940e3fd-e4fc-479f-95a8-1cfd864a7d6f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.686375] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 908.686375] env[61898]: value = "task-1240965" [ 908.686375] env[61898]: _type = "Task" [ 908.686375] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.695509] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240965, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.849982] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b61364-bb2e-4aa9-b962-3a196ca209f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.871975] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdce255-308f-4996-91c9-0fd472b50fe9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.899390] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 67 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 908.942011] env[61898]: DEBUG nova.scheduler.client.report [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 908.969343] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Successfully created port: 53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.041597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.067759] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]529400f6-d1f5-e1cf-e488-beafe76bfa83, 'name': SearchDatastore_Task, 'duration_secs': 0.008677} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.070038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.070309] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.070546] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.070727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.070917] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.071462] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e28d3638-619c-44cd-b0b2-17eea3c71d25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.079919] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.080139] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.080867] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-033fa1c8-ce68-483c-883a-62e1159465ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.085803] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 909.085803] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527cc03c-0435-f6ba-38a8-3ca8d77d9e91" [ 909.085803] env[61898]: _type = "Task" [ 909.085803] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.094601] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527cc03c-0435-f6ba-38a8-3ca8d77d9e91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.099881] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.131585] env[61898]: DEBUG nova.network.neutron [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.131991] env[61898]: DEBUG nova.network.neutron [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.197099] env[61898]: DEBUG oslo_vmware.api [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1240965, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168776} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.197424] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.197615] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.197792] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.197968] env[61898]: INFO nova.compute.manager [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 909.198232] env[61898]: DEBUG oslo.service.loopingcall [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.198509] env[61898]: DEBUG nova.compute.manager [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 909.198509] env[61898]: DEBUG nova.network.neutron [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 909.447071] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.454210] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.884s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.454579] env[61898]: DEBUG nova.objects.instance [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lazy-loading 'resources' on Instance uuid cdd5f647-2c43-4389-820d-2d39d7d20889 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.470105] env[61898]: DEBUG nova.network.neutron [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Port dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 909.473683] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 909.588039] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 909.588453] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 909.588667] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 909.588925] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 909.589252] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 909.589419] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 909.589690] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 909.589966] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 909.590585] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 
tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 909.590585] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 909.590585] env[61898]: DEBUG nova.virt.hardware [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 909.596040] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88d2939-80d8-4b0e-881f-7988d9d1f992 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.619499] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b459bf-97b8-471a-b71d-9b0fb987af81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.624792] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527cc03c-0435-f6ba-38a8-3ca8d77d9e91, 'name': SearchDatastore_Task, 'duration_secs': 0.008268} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.626397] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d316026-4664-4c32-afd8-17080c4d6f3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.638559] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.638891] env[61898]: DEBUG nova.compute.manager [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-vif-unplugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 909.639214] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.639471] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.639792] env[61898]: DEBUG oslo_concurrency.lockutils [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.640039] env[61898]: DEBUG nova.compute.manager [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] No waiting events found dispatching network-vif-unplugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 909.640416] env[61898]: WARNING nova.compute.manager [req-f85e9888-436b-4763-8ff8-5de76d9a6a96 req-3d862172-0a4e-40e0-8f4b-eda1101a6a9d service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received unexpected event network-vif-unplugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a for instance with vm_state shelved and task_state shelving_offloading. [ 909.643550] env[61898]: INFO nova.scheduler.client.report [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted allocations for instance 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb [ 909.646246] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 909.646246] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c50793-20a8-11cd-75c7-669b1aff4ef9" [ 909.646246] env[61898]: _type = "Task" [ 909.646246] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.659618] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c50793-20a8-11cd-75c7-669b1aff4ef9, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.660042] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.660299] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52/bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.660737] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b411f4a3-c89d-4c19-be8c-149d16016d54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.667467] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 909.667467] env[61898]: value = "task-1240966" [ 909.667467] env[61898]: _type = "Task" [ 909.667467] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.677052] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240966, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.110766] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.111203] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb2b2751-f106-4871-a1e9-d0df1d9b73da {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.127023] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 910.127023] env[61898]: value = "task-1240967" [ 910.127023] env[61898]: _type = "Task" [ 910.127023] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.140457] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.156773] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5e533e94-3a02-44f0-b811-92a1e2b4c5ff tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.856s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.176953] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505033} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.180220] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52/bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.180505] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.181854] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-088cb7de-e27a-428c-96ee-8e1944e450c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.191864] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 910.191864] env[61898]: value = "task-1240968" [ 910.191864] env[61898]: _type = "Task" [ 910.191864] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.205497] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240968, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.313721] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02e299a7-1dd8-4748-9227-ddb71231247f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.322639] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2a8375-eb3c-4412-90de-67b5d41b02d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.367359] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbffdb5-03cd-4215-961c-3ddc9f8a256e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.377891] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0b0ac1-d853-4d01-ba1e-bc4aaa980cba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.395541] env[61898]: DEBUG nova.compute.provider_tree [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.429785] env[61898]: DEBUG nova.compute.manager [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 910.430090] env[61898]: DEBUG nova.compute.manager [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing instance network info cache due to event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 910.430479] env[61898]: DEBUG oslo_concurrency.lockutils [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.430759] env[61898]: DEBUG oslo_concurrency.lockutils [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.432187] env[61898]: DEBUG nova.network.neutron [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 910.504801] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.505138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.505546] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.641754] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240967, 'name': PowerOffVM_Task, 'duration_secs': 0.27424} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.642053] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.643059] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e10a47-73a3-4a4c-8eb4-b1610ece9d56 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.663639] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d866b05-b6a8-4da3-afe8-9603ab9ea476 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.705253] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083994} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.707499] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 910.709909] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4b1729-6adb-4d58-836c-29ac3f524054 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.713406] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.713660] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4d603e9-5e43-4457-b721-943771e78e9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.733828] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52/bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 910.734720] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d40f394-9676-4940-bc88-e705610abad9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.752048] env[61898]: DEBUG oslo_vmware.api [None 
req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 910.752048] env[61898]: value = "task-1240969" [ 910.752048] env[61898]: _type = "Task" [ 910.752048] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.757477] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 910.757477] env[61898]: value = "task-1240970" [ 910.757477] env[61898]: _type = "Task" [ 910.757477] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.764360] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 910.764597] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.764844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.764992] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.765183] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.765768] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6904dec7-0419-484d-b5a7-d1bc604d5b29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.770695] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240970, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.777038] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.777038] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.777735] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb7a34d1-a04f-4a00-8847-93e5bdb8bcd1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.782981] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 910.782981] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5283b184-99b7-83c8-2bff-ab23243e99e4" [ 910.782981] env[61898]: _type = "Task" [ 910.782981] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.791545] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5283b184-99b7-83c8-2bff-ab23243e99e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.871237] env[61898]: DEBUG nova.network.neutron [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.900694] env[61898]: DEBUG nova.scheduler.client.report [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 911.046534] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 911.047698] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6017ffe6-aa5e-4f6b-bfef-6291119d615c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.057114] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 911.057343] env[61898]: ERROR oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk due to incomplete transfer. [ 911.057570] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-81102556-be5c-4cb1-9139-541aee07b5d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.064289] env[61898]: DEBUG oslo_vmware.rw_handles [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527bc379-2a48-3a05-eef2-86bf71682948/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 911.064482] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Uploaded image 7d207fb3-39a4-452d-a133-40f06b6cc713 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 911.066923] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 911.067218] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-417a84f2-f806-49c8-a7db-eb68ba972f0a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.073619] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 911.073619] env[61898]: value = "task-1240971" [ 911.073619] env[61898]: _type = "Task" [ 911.073619] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.083072] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240971, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.275866] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240970, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.293879] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5283b184-99b7-83c8-2bff-ab23243e99e4, 'name': SearchDatastore_Task, 'duration_secs': 0.009171} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.294701] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d40ff51-72c0-4a4d-b3cb-4678510c9387 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.300317] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 911.300317] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5296eae9-3e7c-33dd-b92c-b79deaa5b26d" [ 911.300317] env[61898]: _type = "Task" [ 911.300317] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.307803] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5296eae9-3e7c-33dd-b92c-b79deaa5b26d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.375294] env[61898]: INFO nova.compute.manager [-] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Took 2.17 seconds to deallocate network for instance. 
[ 911.406439] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.409292] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.177s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.409964] env[61898]: DEBUG nova.objects.instance [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'pci_requests' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.419862] env[61898]: DEBUG nova.network.neutron [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updated VIF entry in instance network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 911.420330] env[61898]: DEBUG nova.network.neutron [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap53a6375d-a9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.430750] env[61898]: INFO nova.scheduler.client.report [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Deleted allocations for instance cdd5f647-2c43-4389-820d-2d39d7d20889 [ 911.477985] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Successfully updated port: 53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
911.586808] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240971, 'name': Destroy_Task, 'duration_secs': 0.450912} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.587283] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Destroyed the VM [ 911.587469] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 911.587654] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9aba5ae4-d075-4bc1-97b8-1186f567c042 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.594538] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 911.594538] env[61898]: value = "task-1240972" [ 911.594538] env[61898]: _type = "Task" [ 911.594538] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.603104] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240972, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.620213] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.620530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.620715] env[61898]: DEBUG nova.network.neutron [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.770929] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240970, 'name': ReconfigVM_Task, 'duration_secs': 0.556903} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.772092] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfigured VM instance instance-00000059 to attach disk [datastore1] bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52/bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 911.772092] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-421f2cb7-5693-4772-8dbc-121443f1ac46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.777941] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 911.777941] env[61898]: value = "task-1240973" [ 911.777941] env[61898]: _type = "Task" [ 911.777941] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.788259] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240973, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.813600] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5296eae9-3e7c-33dd-b92c-b79deaa5b26d, 'name': SearchDatastore_Task, 'duration_secs': 0.00925} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.813974] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.814193] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. 
{{(pid=61898) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 911.814630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.814819] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77844aa8-164d-4457-85b9-e00318d7627c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.821734] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 911.821734] env[61898]: value = "task-1240974" [ 911.821734] env[61898]: _type = "Task" [ 911.821734] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.830503] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.881032] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.914791] env[61898]: DEBUG nova.objects.instance [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'numa_topology' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.924061] env[61898]: DEBUG oslo_concurrency.lockutils [req-eb2dcae4-fca5-490a-afeb-26e89a302230 req-4979ecb1-9d51-4c11-8929-110a00a6382f service nova] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.940504] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1b83509c-d04f-4a72-a7f2-ee7fc5830f9e tempest-MultipleCreateTestJSON-1762470781 tempest-MultipleCreateTestJSON-1762470781-project-member] Lock "cdd5f647-2c43-4389-820d-2d39d7d20889" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.550s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.988053] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.988053] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.988053] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.103980] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240972, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.293582] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240973, 'name': Rename_Task, 'duration_secs': 0.165556} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.297025] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 912.297025] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-666acab5-4169-4072-abbb-e2cc45f13f6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.303260] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 912.303260] env[61898]: value = "task-1240975" [ 912.303260] env[61898]: _type = "Task" [ 912.303260] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.311840] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.334395] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240974, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.421049] env[61898]: INFO nova.compute.claims [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.551660] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.570448] env[61898]: DEBUG nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Received event network-vif-deleted-a9c5b33c-5075-4ced-8700-0ca1e0071262 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 912.571272] env[61898]: DEBUG nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Received event network-vif-plugged-53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 912.571541] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Acquiring lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.571819] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.572330] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.573143] env[61898]: DEBUG nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] No waiting events found dispatching network-vif-plugged-53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 912.573774] env[61898]: WARNING nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Received unexpected event network-vif-plugged-53aab5ac-41d3-4125-8cee-3a013242a542 for instance with vm_state building and task_state spawning. 
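[editor's note] The records in this stretch keep repeating one control-flow pattern: a vCenter task is invoked (Rename_Task, CopyVirtualDisk_Task, PowerOnVM_Task, CreateVM_Task), then the caller blocks in wait_for_task, which logs "Waiting for the task ... to complete", intermediate "progress is N%" lines from _poll_task, and finally "completed successfully". The snippet below is a minimal, self-contained Python sketch of that poll-until-done loop, included only to make the pattern easier to follow. FakeTask, its step list, and the print wording are hypothetical stand-ins; this is not the oslo.vmware implementation that produced these records.

# Illustrative sketch only: a generic poll-until-done loop in the spirit of the
# "Waiting for the task ... / progress is N% / completed successfully" records above.
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle such as task-1240974."""

    def __init__(self, task_id, name, steps):
        self.task_id = task_id
        self.name = name
        self._progress = iter(steps)  # e.g. [0, 77, 100]

    def poll(self):
        """Return (state, progress); report 'success' once 100% is reached."""
        progress = next(self._progress)
        return ("success" if progress >= 100 else "running"), progress


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, mirroring the DEBUG records in the log."""
    print(f"Waiting for the task: {task.task_id} ({task.name}) to complete.")
    while True:
        state, progress = task.poll()
        if state == "success":
            print(f"Task {task.task_id} ({task.name}) completed successfully.")
            return
        print(f"Task: {task.task_id}, name: {task.name}, progress is {progress}%.")
        time.sleep(interval)


if __name__ == "__main__":
    # Roughly mirrors task-1240974 (CopyVirtualDisk_Task) above: 0% -> 77% -> done.
    wait_for_task(FakeTask("task-1240974", "CopyVirtualDisk_Task", [0, 77, 100]))

In the real driver the same loop runs once per invoked task, which is why each opID in the log is followed by its own "progress is N%" series before the "completed successfully" record.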
[ 912.574007] env[61898]: DEBUG nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Received event network-changed-53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 912.574207] env[61898]: DEBUG nova.compute.manager [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Refreshing instance network info cache due to event network-changed-53aab5ac-41d3-4125-8cee-3a013242a542. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 912.574397] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Acquiring lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.606701] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240972, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.803293] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.804173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.817862] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240975, 'name': PowerOnVM_Task} progress is 76%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.832025] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240974, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536747} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.832531] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk. [ 912.833196] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250126af-74a6-498f-800a-0092d4d2e4bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.867560] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.867921] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e16e135f-51dc-45bc-91d5-3cdc4a63fb91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.897785] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 912.897785] env[61898]: value = "task-1240976" [ 912.897785] env[61898]: _type = "Task" [ 912.897785] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.908984] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240976, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.007333] env[61898]: DEBUG nova.network.neutron [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [{"id": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "address": "fa:16:3e:08:d8:91", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3e4cf3-8b", "ovs_interfaceid": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.014251] env[61898]: DEBUG nova.network.neutron [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Updating instance_info_cache with network_info: [{"id": "53aab5ac-41d3-4125-8cee-3a013242a542", "address": "fa:16:3e:bd:05:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53aab5ac-41", "ovs_interfaceid": "53aab5ac-41d3-4125-8cee-3a013242a542", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.108893] env[61898]: DEBUG oslo_vmware.api [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1240972, 'name': RemoveSnapshot_Task, 'duration_secs': 1.201693} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.109273] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 913.109625] env[61898]: INFO nova.compute.manager [None req-ab0f8162-fb56-4dfb-aec8-027d2686dfd9 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Took 15.56 seconds to snapshot the instance on the hypervisor. [ 913.307749] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 913.323337] env[61898]: DEBUG oslo_vmware.api [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1240975, 'name': PowerOnVM_Task, 'duration_secs': 0.951959} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.323337] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 913.323337] env[61898]: INFO nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Took 8.15 seconds to spawn the instance on the hypervisor. [ 913.323337] env[61898]: DEBUG nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 913.324792] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b435e786-f9be-4abf-aff7-37682132fe51 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.409524] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240976, 'name': ReconfigVM_Task, 'duration_secs': 0.393611} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.410325] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Reconfigured VM instance instance-00000058 to attach disk [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4/e07a6c11-ab12-4187-81fc-1a28a9d1e65d-rescue.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.411204] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e4a803-e0ba-44da-8889-7106fdd429dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.438935] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2176f4dd-fc97-4ba2-8750-c4e7a7504107 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.456805] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 913.456805] env[61898]: value = "task-1240977" [ 913.456805] env[61898]: _type = "Task" [ 913.456805] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.468489] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240977, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.510805] env[61898]: DEBUG oslo_concurrency.lockutils [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.517987] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.518276] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Instance network_info: |[{"id": "53aab5ac-41d3-4125-8cee-3a013242a542", "address": "fa:16:3e:bd:05:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53aab5ac-41", "ovs_interfaceid": "53aab5ac-41d3-4125-8cee-3a013242a542", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 913.518597] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Acquired lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.518776] env[61898]: DEBUG nova.network.neutron [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Refreshing network info cache for port 53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.520659] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:05:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'53aab5ac-41d3-4125-8cee-3a013242a542', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.528368] env[61898]: DEBUG oslo.service.loopingcall [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 913.532608] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.533250] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bac4a247-4fac-4f48-ac4a-b36cdce29c53 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.557227] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.557227] env[61898]: value = "task-1240978" [ 913.557227] env[61898]: _type = "Task" [ 913.557227] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.573334] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240978, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.739425] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6291dab6-d1e5-4584-b4ab-eebcad55b56a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.747279] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06394fc8-61d9-433c-b96e-38b2b8175235 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.778255] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b1c8cf-c360-407a-87a8-1b291fceba11 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.786201] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fa5ac7-3cfe-4080-b991-9811494d5a8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.799807] env[61898]: DEBUG nova.compute.provider_tree [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.831591] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.840910] env[61898]: INFO nova.compute.manager [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 
tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Took 23.00 seconds to build instance. [ 913.967661] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240977, 'name': ReconfigVM_Task, 'duration_secs': 0.212461} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.967661] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.967878] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83753742-6237-4689-b347-74b56af8481d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.973778] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 913.973778] env[61898]: value = "task-1240979" [ 913.973778] env[61898]: _type = "Task" [ 913.973778] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.983331] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.032963] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46763506-18b1-4f52-90ee-84968586a44e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.052174] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef886411-e733-40a9-9a69-3a00e25614a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.059651] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 83 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 914.072563] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240978, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.303207] env[61898]: DEBUG nova.scheduler.client.report [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 914.342649] env[61898]: DEBUG oslo_concurrency.lockutils [None req-99759969-fea8-4026-bd15-b538c8aaaa53 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.531s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.438871] env[61898]: DEBUG nova.network.neutron [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Updated VIF entry in instance network info cache for port 53aab5ac-41d3-4125-8cee-3a013242a542. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.439370] env[61898]: DEBUG nova.network.neutron [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Updating instance_info_cache with network_info: [{"id": "53aab5ac-41d3-4125-8cee-3a013242a542", "address": "fa:16:3e:bd:05:39", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53aab5ac-41", "ovs_interfaceid": "53aab5ac-41d3-4125-8cee-3a013242a542", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.484440] env[61898]: DEBUG oslo_vmware.api [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240979, 'name': PowerOnVM_Task, 'duration_secs': 0.428132} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.484740] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.491355] env[61898]: DEBUG nova.compute.manager [None req-42f9011b-0373-4775-a3e5-4f91dc031019 tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 914.492346] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22c717a-a4c4-4ab8-964c-1c20a66bbbc0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.572962] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.573208] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240978, 'name': CreateVM_Task, 'duration_secs': 0.612868} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.573411] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31aa8f1d-9ae9-4c80-9001-0b6396c2e06a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.574942] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.578018] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.578280] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.578690] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 914.579110] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e659762d-8188-4e38-a288-07f47040b86e 
{{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.585340] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 914.585340] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5270bc0c-fb96-862b-ab61-684b7894005c" [ 914.585340] env[61898]: _type = "Task" [ 914.585340] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.585610] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 914.585610] env[61898]: value = "task-1240980" [ 914.585610] env[61898]: _type = "Task" [ 914.585610] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.598302] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5270bc0c-fb96-862b-ab61-684b7894005c, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.601333] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.601594] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.601836] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.601989] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.602190] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.602820] env[61898]: DEBUG 
oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240980, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.603064] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9394f6cf-f661-4117-8ba1-36a356c0c140 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.610820] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.611028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.611873] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dac6fe05-f914-44f6-bcd6-910e2daa03cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.616789] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 914.616789] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522cec0e-e328-c0c2-95fb-a3bda462dfda" [ 914.616789] env[61898]: _type = "Task" [ 914.616789] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.625107] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522cec0e-e328-c0c2-95fb-a3bda462dfda, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.808600] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.399s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.813617] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.894s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.813617] env[61898]: DEBUG nova.objects.instance [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lazy-loading 'resources' on Instance uuid 80931b22-a69b-41cd-b707-13bf11111b88 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.870511] env[61898]: INFO nova.network.neutron [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating port cd3bd232-226d-4ac0-a9f8-17b93aca92fb with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 914.942016] env[61898]: DEBUG oslo_concurrency.lockutils [req-4008c4a7-7f5c-490c-80c6-42de1eb25af2 req-dff3be90-faa6-428e-99fd-53f2c80cfc53 service nova] Releasing lock "refresh_cache-cd1335b7-78b7-4cea-add7-dd69736067b0" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.095787] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240980, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.127379] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522cec0e-e328-c0c2-95fb-a3bda462dfda, 'name': SearchDatastore_Task, 'duration_secs': 0.008421} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.128150] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c98df64-5a9e-44e0-aef4-ca6b8ab09e3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.133586] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 915.133586] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521b6637-3a4f-caa7-617f-4f1929a291b8" [ 915.133586] env[61898]: _type = "Task" [ 915.133586] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.141869] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521b6637-3a4f-caa7-617f-4f1929a291b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.595485] env[61898]: DEBUG oslo_vmware.api [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1240980, 'name': PowerOnVM_Task, 'duration_secs': 0.806019} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.598197] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.598407] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-daa95779-8d80-40fb-8322-f5e03db94773 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance '5323b250-fad8-4d71-81ed-c5e5eeb8aeab' progress to 100 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 915.615042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a703692-2c31-4198-85f7-8a249cf3f237 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.622338] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-198d11b3-18d3-4c0b-af23-cccf73cd4a99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.655357] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67d42d0-7bb8-4e58-9e18-105ea972e368 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.666910] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b92b5e3-4f86-41fc-89c7-acadd6d1e5af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.670908] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521b6637-3a4f-caa7-617f-4f1929a291b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009153} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.671521] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.671844] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cd1335b7-78b7-4cea-add7-dd69736067b0/cd1335b7-78b7-4cea-add7-dd69736067b0.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.672452] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de065c6e-b277-4fd4-ba16-706c172d3559 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.684510] env[61898]: DEBUG nova.compute.provider_tree [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.691695] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 915.691695] env[61898]: value = "task-1240981" [ 915.691695] env[61898]: _type = "Task" [ 915.691695] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.705641] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240981, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.023419] env[61898]: DEBUG nova.compute.manager [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 916.025309] env[61898]: DEBUG nova.compute.manager [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 916.025309] env[61898]: DEBUG oslo_concurrency.lockutils [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 916.025309] env[61898]: DEBUG oslo_concurrency.lockutils [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.025309] env[61898]: DEBUG nova.network.neutron [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.188337] env[61898]: DEBUG nova.scheduler.client.report [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 916.202454] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505332} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.202788] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] cd1335b7-78b7-4cea-add7-dd69736067b0/cd1335b7-78b7-4cea-add7-dd69736067b0.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.203181] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.203846] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8892dfd-4567-493d-b494-1a241079d471 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.210543] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 916.210543] env[61898]: value = "task-1240982" [ 916.210543] env[61898]: _type = "Task" [ 916.210543] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.219516] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240982, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.693638] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.696394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.927s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.696682] env[61898]: DEBUG nova.objects.instance [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lazy-loading 'resources' on Instance uuid 86367a82-239b-4f6e-b306-d9661eadf95e {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.720709] env[61898]: INFO nova.scheduler.client.report [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleted allocations for instance 80931b22-a69b-41cd-b707-13bf11111b88 [ 916.725640] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240982, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068705} completed successfully. 
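The recurring "Acquiring lock … by …", "Lock … acquired … :: waited N s" and "Lock … released … :: held N s" triplets in these records come from oslo.concurrency's lockutils wrappers (the `inner` and `lock` call sites in the locators). A minimal sketch of that pattern, using a placeholder lock name and function rather than anything taken from this log:

# Minimal illustration of the oslo.concurrency locking pattern whose DEBUG
# output appears above; the lock name and the function body are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the named in-process lock held; the decorator's wrapper logs
    # "Acquiring lock", "acquired ... waited", and "released ... held".
    pass

# The context-manager form produces the "Acquiring/Acquired/Releasing lock"
# variants seen for the refresh_cache-* locks in this section.
with lockutils.lock('refresh_cache-example'):
    pass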
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.728815] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 916.730053] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3462fd47-12af-4c32-8653-a9b642715e82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.756046] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] cd1335b7-78b7-4cea-add7-dd69736067b0/cd1335b7-78b7-4cea-add7-dd69736067b0.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.756046] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56d0c898-17aa-426e-b279-7c6f88a16017 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.778523] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 916.778523] env[61898]: value = "task-1240983" [ 916.778523] env[61898]: _type = "Task" [ 916.778523] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.787272] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240983, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.034298] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.034670] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.235522] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17873bef-995e-4935-97fb-ce13d6ff5d84 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "80931b22-a69b-41cd-b707-13bf11111b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.340s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.293245] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240983, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.394788] env[61898]: DEBUG nova.network.neutron [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.395191] env[61898]: DEBUG nova.network.neutron [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.513584] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fd57ce-bd62-4b72-a2bc-27ed62e15522 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.524606] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5286b8-1123-4296-a01e-049b14d43bcc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.563161] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 917.568380] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.568822] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.569071] env[61898]: DEBUG nova.network.neutron [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.570791] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9591c5-bcd9-4257-bce1-62283786825c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.581540] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a017d2e0-4df1-42a4-b1a6-e244a5b51856 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.598363] env[61898]: DEBUG nova.compute.provider_tree [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.789709] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240983, 'name': ReconfigVM_Task, 'duration_secs': 0.810057} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.790047] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Reconfigured VM instance instance-0000005a to attach disk [datastore1] cd1335b7-78b7-4cea-add7-dd69736067b0/cd1335b7-78b7-4cea-add7-dd69736067b0.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.790728] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb442e3c-9bf6-422f-a685-768d5009e0f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.797624] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 917.797624] env[61898]: value = "task-1240984" [ 917.797624] env[61898]: _type = "Task" [ 917.797624] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.805416] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240984, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.897915] env[61898]: DEBUG oslo_concurrency.lockutils [req-2258b8e2-8f88-4872-90c7-d4aaaf147338 req-30ba35d7-65f5-4cf1-914f-d5ee492aefd6 service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.034866] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.035194] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.035330] env[61898]: DEBUG nova.compute.manager [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Going to confirm migration 1 {{(pid=61898) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 918.088918] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.101354] env[61898]: DEBUG nova.scheduler.client.report [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 918.307076] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240984, 'name': Rename_Task, 'duration_secs': 0.306867} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.307401] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.307683] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-597e5bc3-5af5-4059-bcd1-83c7403901cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.315222] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 918.315222] env[61898]: value = "task-1240985" [ 918.315222] env[61898]: _type = "Task" [ 918.315222] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.322653] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240985, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.413461] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 918.413666] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing instance network info cache due to event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 918.413883] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.414319] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.414585] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 918.502580] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.502852] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.503088] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.503289] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.503460] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.505639] env[61898]: INFO nova.compute.manager [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 
tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Terminating instance [ 918.519616] env[61898]: DEBUG nova.network.neutron [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.606211] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.608521] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.506s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.628478] env[61898]: INFO nova.scheduler.client.report [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted allocations for instance 86367a82-239b-4f6e-b306-d9661eadf95e [ 918.701264] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.701807] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock 
"refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.701807] env[61898]: DEBUG nova.network.neutron [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 918.701988] env[61898]: DEBUG nova.objects.instance [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'info_cache' on Instance uuid 5323b250-fad8-4d71-81ed-c5e5eeb8aeab {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.825366] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240985, 'name': PowerOnVM_Task} progress is 76%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.012017] env[61898]: DEBUG nova.compute.manager [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 919.012017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.012017] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9eca2ab-ffa6-46c0-8d8e-8401f6bf205e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.018810] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 919.019234] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-408a095f-6d4b-43ad-a11c-b63c5d7f2b8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.021353] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.029313] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 919.029313] env[61898]: value = "task-1240986" [ 919.029313] env[61898]: _type = "Task" 
[ 919.029313] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.039255] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.066840] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ed575efd0fcc906044694814653a34ac',container_format='bare',created_at=2024-10-10T12:00:21Z,direct_url=,disk_format='vmdk',id=5d955d84-2f56-40e7-a5a1-0f6937a182cf,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1624870473-shelved',owner='a6198f817d1b471483500fe05c9bef3f',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-10-10T12:00:35Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 919.067641] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 919.067964] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.068319] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 919.072222] env[61898]: DEBUG nova.virt.hardware [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 919.072222] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e30d188-e26a-4936-9fd6-05960a59e843 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.084077] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b68680-451b-4dc7-a313-be40c89b8a40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.099028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:f5:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd3bd232-226d-4ac0-a9f8-17b93aca92fb', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 919.106177] env[61898]: DEBUG oslo.service.loopingcall [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
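The nova.virt.hardware DEBUG records above reduce a 1-vCPU flavor with 65536/65536/65536 limits to the single topology VirtCPUTopology(cores=1,sockets=1,threads=1). A toy enumeration (not nova's implementation) showing why that is the only candidate:

# Toy illustration, not nova.virt.hardware itself: enumerate the
# (sockets, cores, threads) factorizations of the vCPU count within limits.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield sockets, cores, threads

# One vCPU admits exactly one factorization, matching the log: [(1, 1, 1)]
print(list(possible_topologies(1, 65536, 65536, 65536)))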
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 919.106657] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 919.107075] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1301f4fd-67bd-4092-8992-15e79200330b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.125223] env[61898]: INFO nova.compute.claims [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.136894] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 919.136894] env[61898]: value = "task-1240987" [ 919.136894] env[61898]: _type = "Task" [ 919.136894] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.137520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-316cc583-a527-46d7-95bc-48dc25dd7d09 tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "86367a82-239b-4f6e-b306-d9661eadf95e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.058s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.148606] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240987, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.325348] env[61898]: DEBUG oslo_vmware.api [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1240985, 'name': PowerOnVM_Task, 'duration_secs': 0.96938} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.325669] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 919.327325] env[61898]: INFO nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 919.327577] env[61898]: DEBUG nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 919.328412] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367b341a-01d2-46ae-802d-4653719c492d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.404040] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updated VIF entry in instance network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 919.404580] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.538223] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240986, 'name': PowerOffVM_Task, 'duration_secs': 0.254618} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.538949] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.538949] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.539089] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07589687-6ff5-499b-99b8-c6ebf7a9d2d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.634795] env[61898]: INFO nova.compute.resource_tracker [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating resource usage from migration 349e2a4f-d827-4046-80e3-17c5d05dd026 [ 919.647455] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1240987, 'name': CreateVM_Task, 'duration_secs': 0.344168} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.647663] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.648347] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.648515] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.648889] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 919.649181] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0dce04a-accf-4c14-8d9c-83e636f8d88b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.654543] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 919.654543] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5204ea89-19f5-bec3-2eb3-b6ae4bf73bf2" [ 919.654543] env[61898]: _type = "Task" [ 919.654543] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.669759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.673030] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Processing image 5d955d84-2f56-40e7-a5a1-0f6937a182cf {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.673030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.673030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.673030] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.673030] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce962926-5131-4db1-8cd5-80e27e1bf0e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.681712] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.681712] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.681712] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4820940a-67f1-4355-aa85-e9ad58c1e21e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.685236] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 919.685236] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a7bb96-2e39-388f-8db0-525acf22afce" [ 919.685236] env[61898]: _type = "Task" [ 919.685236] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.696455] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a7bb96-2e39-388f-8db0-525acf22afce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.852795] env[61898]: INFO nova.compute.manager [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Took 23.40 seconds to build instance. [ 919.899886] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8270370a-d516-4ba5-acd3-25202d9e6058 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.907428] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.907653] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 919.907852] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.908062] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.908238] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Lock 
"070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.908409] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] No waiting events found dispatching network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.908583] env[61898]: WARNING nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received unexpected event network-vif-plugged-cd3bd232-226d-4ac0-a9f8-17b93aca92fb for instance with vm_state shelved_offloaded and task_state spawning. [ 919.908748] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 919.908906] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing instance network info cache due to event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 919.909118] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 919.909276] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.909439] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 919.911326] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-418e8d60-bec5-4d26-88f4-fe12444011ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.948642] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be48af5-6772-4461-9227-96d2eb793daa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.956751] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0e1057-6548-4923-b8c0-494e5814b10e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
919.974907] env[61898]: DEBUG nova.compute.provider_tree [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.123355] env[61898]: DEBUG nova.network.neutron [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [{"id": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "address": "fa:16:3e:08:d8:91", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc3e4cf3-8b", "ovs_interfaceid": "dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.196647] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 920.196920] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Fetch image to [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4/OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 920.197123] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Downloading stream optimized image 5d955d84-2f56-40e7-a5a1-0f6937a182cf to [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4/OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4.vmdk on the data store datastore2 as vApp {{(pid=61898) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 920.197422] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Downloading image file data 5d955d84-2f56-40e7-a5a1-0f6937a182cf to the ESX as VM named 'OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4' {{(pid=61898) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 920.243174] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "aab10d8f-0d25-4351-a627-7222be63895e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.243436] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.243647] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "aab10d8f-0d25-4351-a627-7222be63895e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.243830] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.244013] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.248443] env[61898]: INFO nova.compute.manager [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Terminating instance [ 920.286100] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 920.286100] env[61898]: value = "resgroup-9" [ 920.286100] env[61898]: _type = "ResourcePool" [ 920.286100] env[61898]: }. 
{{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 920.286522] env[61898]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-affe3c36-f846-4dff-99a3-e3c8dc8002f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.322963] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease: (returnval){ [ 920.322963] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529321da-f482-d51a-1595-e94b19048951" [ 920.322963] env[61898]: _type = "HttpNfcLease" [ 920.322963] env[61898]: } obtained for vApp import into resource pool (val){ [ 920.322963] env[61898]: value = "resgroup-9" [ 920.322963] env[61898]: _type = "ResourcePool" [ 920.322963] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 920.323420] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the lease: (returnval){ [ 920.323420] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529321da-f482-d51a-1595-e94b19048951" [ 920.323420] env[61898]: _type = "HttpNfcLease" [ 920.323420] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 920.331674] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 920.331674] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529321da-f482-d51a-1595-e94b19048951" [ 920.331674] env[61898]: _type = "HttpNfcLease" [ 920.331674] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 920.355942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-bf928253-20b0-4335-b151-6b48fe68ecdd tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.911s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.478256] env[61898]: DEBUG nova.scheduler.client.report [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 920.626427] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-5323b250-fad8-4d71-81ed-c5e5eeb8aeab" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 920.626729] env[61898]: DEBUG nova.objects.instance [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'migration_context' on Instance uuid 5323b250-fad8-4d71-81ed-c5e5eeb8aeab {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 920.754663] env[61898]: DEBUG nova.compute.manager [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 920.754983] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 920.756043] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeeff7e9-1c2b-415e-a90b-8e06eea73336 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.765574] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 920.766516] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41f426f2-1f00-4a70-bdb4-55477a40f463 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.773052] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 920.773052] env[61898]: value = "task-1240990" [ 920.773052] env[61898]: _type = "Task" [ 920.773052] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.781191] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.833573] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 920.833573] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529321da-f482-d51a-1595-e94b19048951" [ 920.833573] env[61898]: _type = "HttpNfcLease" [ 920.833573] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 920.833573] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 920.833573] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]529321da-f482-d51a-1595-e94b19048951" [ 920.833573] env[61898]: _type = "HttpNfcLease" [ 920.833573] env[61898]: }. 
{{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 920.834690] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47576bbb-a886-4b10-a288-aca6d4a52a4a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.843132] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 920.843608] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 920.916599] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-360dde03-52e8-4e60-bf1a-0941f170d721 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.984884] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.375s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 920.984884] env[61898]: INFO nova.compute.manager [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Migrating [ 920.996054] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.668s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.998116] env[61898]: INFO nova.compute.claims [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.129912] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updated VIF entry in instance network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 921.130294] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.132108] env[61898]: DEBUG nova.objects.base [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Object Instance<5323b250-fad8-4d71-81ed-c5e5eeb8aeab> lazy-loaded attributes: info_cache,migration_context {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 921.134469] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1ad07b-e023-4aaf-b6ae-98b729f0cb06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.159988] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86c13671-d748-4009-b288-01198bc7348a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.167713] env[61898]: DEBUG oslo_vmware.api [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 921.167713] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5280a6ae-d8c0-d5a7-0a75-160a4f01b809" [ 921.167713] env[61898]: _type = "Task" [ 921.167713] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.177469] env[61898]: DEBUG oslo_vmware.api [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5280a6ae-d8c0-d5a7-0a75-160a4f01b809, 'name': SearchDatastore_Task, 'duration_secs': 0.007614} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.179467] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.285832] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240990, 'name': PowerOffVM_Task, 'duration_secs': 0.203935} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.287673] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.287861] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.288150] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bd3a478-cca5-4355-af66-fece27383afe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.364854] env[61898]: DEBUG nova.compute.manager [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 921.365075] env[61898]: DEBUG nova.compute.manager [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing instance network info cache due to event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 921.365276] env[61898]: DEBUG oslo_concurrency.lockutils [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.516653] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.516855] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.517276] env[61898]: DEBUG nova.network.neutron [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.639115] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 921.639495] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 921.640394] env[61898]: DEBUG nova.compute.manager [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing instance network info cache due to event network-changed-cd3bd232-226d-4ac0-a9f8-17b93aca92fb. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 921.641830] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquiring lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 921.642621] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Acquired lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.643200] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Refreshing network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.645786] env[61898]: DEBUG oslo_concurrency.lockutils [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.646055] env[61898]: DEBUG nova.network.neutron [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.993705] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 921.994123] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 921.994441] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleting the datastore file [datastore1] 9b7b9962-fda1-46af-9ecc-ea5b352d5193 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 921.996453] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-898f4f2e-2328-4e37-9522-c103a4f3919e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.002874] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.003140] env[61898]: DEBUG 
nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.003356] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore2] aab10d8f-0d25-4351-a627-7222be63895e {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.005326] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87669a5a-62e1-4739-ba14-70d7d97732a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.009540] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for the task: (returnval){ [ 922.009540] env[61898]: value = "task-1240992" [ 922.009540] env[61898]: _type = "Task" [ 922.009540] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.023868] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 922.023868] env[61898]: value = "task-1240993" [ 922.023868] env[61898]: _type = "Task" [ 922.023868] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.043554] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.050836] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240993, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.199024] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 922.199024] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 922.199024] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14df8fed-a0df-44bd-a0a1-7a4465aa7f85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.210510] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 922.210730] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 922.211018] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6f8b0ce4-54c6-44d3-bb82-788b628ee4f6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.362185] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9e8900-9ead-4adb-b70a-302e8bfd2944 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.376852] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e03c47b-3739-4184-8e2a-295ccdd3d1d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.423510] env[61898]: DEBUG nova.network.neutron [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updated VIF entry in instance network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.424406] env[61898]: DEBUG nova.network.neutron [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.426140] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7153fc31-7c43-4824-a101-021dbf569ada {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.437277] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bd6f38-b2a7-4c3d-aaf1-45c086dc7d3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.457249] env[61898]: DEBUG nova.compute.provider_tree [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.525225] env[61898]: DEBUG oslo_vmware.api [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Task: {'id': task-1240992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379054} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.525832] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.526146] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.526635] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.526767] env[61898]: INFO nova.compute.manager [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Took 3.52 seconds to destroy the instance on the hypervisor. [ 922.527150] env[61898]: DEBUG oslo.service.loopingcall [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.527415] env[61898]: DEBUG nova.compute.manager [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 922.527543] env[61898]: DEBUG nova.network.neutron [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.548028] env[61898]: DEBUG oslo_vmware.api [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1240993, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.207072} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.548028] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.548028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.548028] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.548028] env[61898]: INFO nova.compute.manager [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Took 1.79 seconds to destroy the instance on the hypervisor. [ 922.548028] env[61898]: DEBUG oslo.service.loopingcall [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 922.548028] env[61898]: DEBUG nova.compute.manager [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 922.548028] env[61898]: DEBUG nova.network.neutron [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.587524] env[61898]: DEBUG oslo_vmware.rw_handles [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5204102b-9fe0-85c9-3235-0f334121ffec/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 922.587784] env[61898]: INFO nova.virt.vmwareapi.images [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Downloaded image file data 5d955d84-2f56-40e7-a5a1-0f6937a182cf [ 922.588634] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3e572f-45bc-4759-b87d-3ca14441d718 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.607192] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e47ab039-a9f7-4fd4-ba9b-318b3ed58259 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.649521] env[61898]: INFO nova.virt.vmwareapi.images [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] The imported VM was unregistered [ 922.652655] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 922.652979] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.653408] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae43cc0d-cf05-4412-a992-600181e5718a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.664851] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.665098] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4/OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4.vmdk to [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk. 
{{(pid=61898) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 922.667897] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1945c83e-ef49-42ba-80cd-a5c0b194ab4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.678270] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 922.678270] env[61898]: value = "task-1240995" [ 922.678270] env[61898]: _type = "Task" [ 922.678270] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.686406] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.830704] env[61898]: DEBUG nova.network.neutron [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.931928] env[61898]: DEBUG oslo_concurrency.lockutils [req-ced63441-af13-4e07-b3a4-7867350bd4d5 req-b3fee0a1-10ff-478a-ac13-ff20cadcf705 service nova] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 922.954857] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updated VIF entry in instance network info cache for port cd3bd232-226d-4ac0-a9f8-17b93aca92fb. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.955276] env[61898]: DEBUG nova.network.neutron [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [{"id": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "address": "fa:16:3e:0e:f5:a4", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd3bd232-22", "ovs_interfaceid": "cd3bd232-226d-4ac0-a9f8-17b93aca92fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.962041] env[61898]: DEBUG nova.scheduler.client.report [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 923.106100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.106278] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.190340] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.208096] env[61898]: DEBUG nova.compute.manager [req-a449da6a-add9-4314-b343-7df0faf91680 req-0a70455a-ffe8-4508-af85-11d7d04e00e6 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Received event network-vif-deleted-b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 923.208523] env[61898]: INFO nova.compute.manager [req-a449da6a-add9-4314-b343-7df0faf91680 req-0a70455a-ffe8-4508-af85-11d7d04e00e6 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Neutron deleted interface b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64; detaching it from the instance and deleting it from the info cache [ 923.208523] env[61898]: DEBUG nova.network.neutron [req-a449da6a-add9-4314-b343-7df0faf91680 req-0a70455a-ffe8-4508-af85-11d7d04e00e6 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.308901] env[61898]: DEBUG nova.network.neutron [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.336541] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.393300] env[61898]: DEBUG nova.compute.manager [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 923.393724] env[61898]: DEBUG nova.compute.manager [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing instance network info cache due to event network-changed-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 923.393724] env[61898]: DEBUG oslo_concurrency.lockutils [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] Acquiring lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 923.393873] env[61898]: DEBUG oslo_concurrency.lockutils [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] Acquired lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.394109] env[61898]: DEBUG nova.network.neutron [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Refreshing network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 923.458478] env[61898]: DEBUG oslo_concurrency.lockutils [req-bef99365-b745-421c-b9bd-86c1ef962cb4 req-99587df9-750a-44f2-9192-280546e2ed7c service nova] Releasing lock "refresh_cache-070bc0cc-ff77-48b8-bd08-f17fe69e25af" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 923.470179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.470885] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 923.474088] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.374s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.474357] env[61898]: DEBUG nova.objects.instance [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'resources' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.608370] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 923.643197] env[61898]: DEBUG nova.network.neutron [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.693320] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.711372] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c02f6050-f8d2-46d0-b57a-d74f0ebb7e53 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.722165] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e564fbbc-d9d7-4b3b-9338-c2232c77c447 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.753753] env[61898]: DEBUG nova.compute.manager [req-a449da6a-add9-4314-b343-7df0faf91680 req-0a70455a-ffe8-4508-af85-11d7d04e00e6 service nova] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Detach interface failed, port_id=b7fb0979-2b7e-4f4d-85e5-2cd7f203ae64, reason: Instance 9b7b9962-fda1-46af-9ecc-ea5b352d5193 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 923.811467] env[61898]: INFO nova.compute.manager [-] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Took 1.28 seconds to deallocate network for instance. 
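The repeated "Waiting for the task" / "Task: {'id': task-..., 'name': ...} progress is N%." entries above (PowerOffVM_Task, the DeleteDatastoreFile_Task pair, and MoveVirtualDisk_Task) come from oslo.vmware's wait_for_task/_poll_task loop in oslo_vmware/api.py, as the {{(pid=61898) wait_for_task ... api.py:397}} and {{(pid=61898) _poll_task ... api.py:434}} location suffixes show. The following is a minimal, self-contained sketch of that polling pattern only; TaskInfo, fetch_task_info, and the simulated backend are hypothetical stand-ins for illustration and are not the actual oslo.vmware API.

    # Illustrative sketch of a "wait for task" poll loop in the spirit of the
    # wait_for_task/_poll_task log entries above. All names here are hypothetical
    # stand-ins, not the real oslo.vmware implementation.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str
        name: str
        state: str            # "running", "success", or "error"
        progress: int         # 0-100
        error: str | None = None

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
        """Poll a task until it reaches a terminal state, logging progress."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            # Mirrors the "Task: {'id': ..., 'name': ...} progress is N%." lines.
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
                  f"progress is {info.progress}%.")
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

    if __name__ == "__main__":
        # Simulated backend: the task advances 20% per poll, then succeeds.
        progress = {"value": 0}

        def fake_fetch(task_id):
            progress["value"] = min(progress["value"] + 20, 100)
            state = "success" if progress["value"] >= 100 else "running"
            return TaskInfo(task_id, "MoveVirtualDisk_Task", state, progress["value"])

        wait_for_task(fake_fetch, "task-1240995")

In the log itself, this is the loop that carries task-1240995 (MoveVirtualDisk_Task) from 0% through 21%, 40%, and 60% across successive polls while the image is moved into the devstack image cache.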
[ 923.887030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.887366] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.887646] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.887919] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.888127] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.892733] env[61898]: INFO nova.compute.manager [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Terminating instance [ 923.977269] env[61898]: DEBUG nova.compute.utils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 923.979865] env[61898]: DEBUG nova.objects.instance [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'numa_topology' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.984031] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 
47208ebd-8407-4d00-8378-adb0a4a21c2a] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 923.984031] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 924.134508] env[61898]: DEBUG nova.policy [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 924.138336] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.146093] env[61898]: INFO nova.compute.manager [-] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Took 1.60 seconds to deallocate network for instance. [ 924.191801] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.318707] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.397348] env[61898]: DEBUG nova.compute.manager [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 924.397823] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 924.399413] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e933ec55-69e1-4552-af1a-8ca269991905 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.412871] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.413406] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-389427d0-9ddc-4ecc-904b-357a08e422a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.425561] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 924.425561] env[61898]: value = "task-1240996" [ 924.425561] env[61898]: _type = "Task" [ 924.425561] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.438620] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240996, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.465912] env[61898]: DEBUG nova.network.neutron [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updated VIF entry in instance network info cache for port f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.466604] env[61898]: DEBUG nova.network.neutron [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [{"id": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "address": "fa:16:3e:fb:a9:9c", "network": {"id": "6b546ef4-b25d-43da-a6dc-d9bad3ad3f0f", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1330389419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "025bf0ed02e24a998d2a6f7cf7ae77b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8ab1155-cd", "ovs_interfaceid": "f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.480646] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 924.485575] env[61898]: DEBUG nova.objects.base [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Object Instance<45b8dc91-b577-4548-bf3a-32c7c936c616> lazy-loaded attributes: resources,numa_topology {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 924.655148] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.695501] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.801795] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e912cd3-e4ba-438a-8450-62e85b6f020f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.805485] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Successfully created port: 1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 924.814935] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a2478d-e66e-42fe-af29-e5d37c04dea5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.853820] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a04c049-7bfd-4b45-9986-1572e911a762 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.857810] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37abc833-f6f9-4750-b07f-ea1900915134 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.877134] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 0 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 924.884525] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ab8b6a-ac1e-45e7-b0a5-a957676a0a6d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.899580] env[61898]: DEBUG nova.compute.provider_tree [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.938729] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240996, 'name': PowerOffVM_Task, 'duration_secs': 0.410608} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.939123] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.939322] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.939593] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60ad525b-c5b9-4906-9778-bc3feabb65b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.970340] env[61898]: DEBUG oslo_concurrency.lockutils [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] Releasing lock "refresh_cache-b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.970633] env[61898]: DEBUG nova.compute.manager [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Received event network-vif-deleted-c2aa5537-be01-417c-8f48-83f2f9a04ff4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 924.970816] env[61898]: INFO nova.compute.manager [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Neutron deleted interface c2aa5537-be01-417c-8f48-83f2f9a04ff4; detaching it from the instance and deleting it from the info cache [ 924.970991] env[61898]: DEBUG nova.network.neutron [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.190546] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240995, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.505887} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.190828] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4/OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4.vmdk to [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk. 
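Editor's note: the repeated "Task: {'id': task-..., 'name': MoveVirtualDisk_Task} progress is N%." lines followed by "completed successfully" with a duration come from the task-polling loop that oslo.vmware's VMwareAPISession runs internally (the _poll_task / wait_for_task frames in the log). The sketch below shows that polling pattern in simplified form; get_task_info() is a hypothetical stand-in for the vSphere TaskInfo lookup, not a real library call, and this is not the oslo.vmware implementation itself.

# Simplified sketch of the vCenter task-polling pattern seen in this log.
import time


def wait_for_vcenter_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)  # dict-like stand-in for TaskInfo
        if info['state'] == 'running':
            print('Task %s progress is %d%%.'
                  % (task_ref, info.get('progress', 0)))
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - start
        if info['state'] == 'success':
            print('Task %s completed successfully in %.6fs.'
                  % (task_ref, duration))
            return info.get('result')
        raise RuntimeError('Task %s failed: %s' % (task_ref, info.get('error')))


if __name__ == '__main__':
    # Fake TaskInfo sequence standing in for a MoveVirtualDisk_Task run.
    states = iter([
        {'state': 'running', 'progress': 40},
        {'state': 'running', 'progress': 80},
        {'state': 'success', 'result': 'moved'},
    ])
    wait_for_vcenter_task(lambda ref: next(states), 'task-1240995')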
[ 925.191052] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Cleaning up location [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4 {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 925.191237] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_01d4cbf7-83b5-4c4d-8a65-437a680263e4 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.191503] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6823c7f9-c44c-4ad7-9167-f99258106648 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.197820] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 925.197820] env[61898]: value = "task-1240998" [ 925.197820] env[61898]: _type = "Task" [ 925.197820] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.205688] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240998, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.268974] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.269307] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.269498] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Deleting the datastore file [datastore2] b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.270046] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4f37948-75bc-459a-8b19-390ea9dcdbec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.276631] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for the task: (returnval){ [ 925.276631] env[61898]: value = "task-1240999" [ 925.276631] env[61898]: _type = "Task" [ 925.276631] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.284395] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240999, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.386147] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.388868] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c17c6fd-cc2c-4fed-acdd-aec64aaafcc4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.396554] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 925.396554] env[61898]: value = "task-1241000" [ 925.396554] env[61898]: _type = "Task" [ 925.396554] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.405483] env[61898]: DEBUG nova.scheduler.client.report [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 925.408900] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241000, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.473793] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-02ead943-8c19-45a3-ac59-44fec48fd50f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.485007] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16cd9ac-e7c9-47d3-a9d9-49467e1046b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.499933] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 925.519522] env[61898]: DEBUG nova.compute.manager [req-5495e9e5-e9f1-43d1-9993-f222627ed97a req-012269be-ab81-4a3e-80bf-ef566209a62b service nova] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Detach interface failed, port_id=c2aa5537-be01-417c-8f48-83f2f9a04ff4, reason: Instance aab10d8f-0d25-4351-a627-7222be63895e could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 925.528816] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 925.529106] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 925.529282] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.529478] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 925.529631] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.529784] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 925.530075] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 925.530315] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 925.530540] env[61898]: DEBUG nova.virt.hardware [None 
req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 925.530763] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 925.530986] env[61898]: DEBUG nova.virt.hardware [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 925.531868] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbb8f08-431e-4ed5-9bd1-f06b98fa2d29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.539598] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f311be-f803-461c-9e77-e3e647039d90 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.707164] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1240998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053073} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.707448] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.707640] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.707890] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk to [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.708156] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34b2668c-2a71-4001-b4ff-f1de73fc2e82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.714762] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 925.714762] env[61898]: value = "task-1241001" [ 925.714762] env[61898]: _type = "Task" [ 925.714762] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.723903] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.786504] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.911078] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.437s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.913754] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.033s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.914621] env[61898]: DEBUG nova.objects.instance [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'resources' on Instance uuid 1fb4535d-47d8-45c5-b6d6-d05e57237b98 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.915526] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 925.915526] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 17 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.227779] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.273356] env[61898]: DEBUG nova.compute.manager [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Received event network-vif-plugged-1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 926.273577] env[61898]: DEBUG oslo_concurrency.lockutils [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] Acquiring lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.273799] env[61898]: DEBUG oslo_concurrency.lockutils [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.273970] env[61898]: DEBUG oslo_concurrency.lockutils [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.274160] env[61898]: DEBUG nova.compute.manager [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] No waiting events found dispatching network-vif-plugged-1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.274349] env[61898]: WARNING nova.compute.manager [req-483edbcc-c037-474f-a2b5-97f9836f8d15 req-375ff455-3946-4909-bf37-9daf7100369f service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Received unexpected event network-vif-plugged-1055fd09-3d12-49b4-bac7-a4b6b9208ca0 for instance with vm_state building and task_state spawning. [ 926.290698] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240999, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.335692] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.336156] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.336420] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 926.426835] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.427551] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.427856] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.428189] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.428492] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.428782] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.429165] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.429476] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.429787] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.430143] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.430471] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.438079] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76e24af6-b61c-42d3-a25b-4ba8740a0bd2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.451066] env[61898]: DEBUG oslo_concurrency.lockutils [None req-b26876a8-38b5-464c-bc3b-bea59c4f1447 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 40.711s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.453018] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 14.638s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.453018] env[61898]: INFO nova.compute.manager [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Unshelving [ 926.468038] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 926.468038] env[61898]: value = "task-1241002" [ 926.468038] env[61898]: _type = "Task" [ 926.468038] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.481992] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241002, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.484589] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Successfully updated port: 1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.731156] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.734826] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc0d7203-3287-4dd1-b2da-6954fb0ef6fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.745017] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003aa7cf-1d89-4d70-ab85-5a97d5e6ac81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.784789] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f3e759-6484-4c84-ba2a-04045369b9b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.795985] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6c5fe3-4f7e-4bd9-b531-da5ac950d82e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.800292] env[61898]: DEBUG oslo_vmware.api [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Task: {'id': task-1240999, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.181757} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.800553] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 926.800700] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 926.800894] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 926.801087] env[61898]: INFO nova.compute.manager [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Took 2.40 seconds to destroy the instance on the hypervisor. [ 926.801347] env[61898]: DEBUG oslo.service.loopingcall [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.801987] env[61898]: DEBUG nova.compute.manager [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 926.802178] env[61898]: DEBUG nova.network.neutron [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.814374] env[61898]: DEBUG nova.compute.provider_tree [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.979292] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241002, 'name': ReconfigVM_Task, 'duration_secs': 0.167769} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.979411] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 33 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.988367] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.988367] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.988367] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.228744] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.318071] env[61898]: DEBUG nova.scheduler.client.report [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 927.374468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.374717] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.374886] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 927.484881] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.487323] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.487662] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.487852] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.488102] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.488345] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.488535] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.488753] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.488934] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 927.489212] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.489468] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.489686] env[61898]: DEBUG nova.virt.hardware [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.495905] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 927.498264] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1c14700-7c39-417c-9ca5-f1fc022c9862 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.522211] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 927.522211] env[61898]: value = "task-1241003" [ 927.522211] env[61898]: _type = "Task" [ 927.522211] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.537063] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241003, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.560944] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.729328] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.744059] env[61898]: DEBUG nova.network.neutron [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.785267] env[61898]: DEBUG nova.network.neutron [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Updating instance_info_cache with network_info: [{"id": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "address": "fa:16:3e:85:20:45", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1055fd09-3d", "ovs_interfaceid": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.823838] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.826916] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.995s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.828807] env[61898]: INFO nova.compute.claims [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.849711] env[61898]: INFO nova.scheduler.client.report [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleted allocations for instance 1fb4535d-47d8-45c5-b6d6-d05e57237b98 [ 928.040742] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241003, 'name': ReconfigVM_Task, 'duration_secs': 0.174373} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.041318] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 928.042771] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22fee70-0b65-4f4a-86a4-365d1e379460 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.065690] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.066332] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2fe99425-dccb-40e6-a067-99f3e1bc58e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.083060] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 928.083060] env[61898]: value = "task-1241004" [ 928.083060] env[61898]: _type = "Task" [ 
928.083060] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.090397] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241004, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.227271] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241001, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.347736} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.227594] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5d955d84-2f56-40e7-a5a1-0f6937a182cf/5d955d84-2f56-40e7-a5a1-0f6937a182cf.vmdk to [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.228464] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d7f595-88c4-42ed-8ae5-8679603d64e0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.255328] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.255328] env[61898]: INFO nova.compute.manager [-] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Took 1.45 seconds to deallocate network for instance. [ 928.255328] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9df0bf61-52ca-451c-b65d-15c0c6a6843e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.278385] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 928.278385] env[61898]: value = "task-1241005" [ 928.278385] env[61898]: _type = "Task" [ 928.278385] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.288381] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.288728] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Instance network_info: |[{"id": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "address": "fa:16:3e:85:20:45", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1055fd09-3d", "ovs_interfaceid": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 928.289105] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.289521] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:20:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1055fd09-3d12-49b4-bac7-a4b6b9208ca0', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.298032] env[61898]: DEBUG oslo.service.loopingcall [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.298274] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 928.298526] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9bc802d-f281-45ac-b29d-85d966ef6820 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.323923] env[61898]: DEBUG nova.compute.manager [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Received event network-changed-1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 928.324135] env[61898]: DEBUG nova.compute.manager [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Refreshing instance network info cache due to event network-changed-1055fd09-3d12-49b4-bac7-a4b6b9208ca0. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 928.324398] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] Acquiring lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.324549] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] Acquired lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.324750] env[61898]: DEBUG nova.network.neutron [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Refreshing network info cache for port 1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.327538] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.327538] env[61898]: value = "task-1241006" [ 928.327538] env[61898]: _type = "Task" [ 928.327538] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.336654] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241006, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.356927] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7ff87378-cc78-4e4b-a7e5-7663e558f7d5 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "1fb4535d-47d8-45c5-b6d6-d05e57237b98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.801s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.593402] env[61898]: DEBUG oslo_vmware.api [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241004, 'name': ReconfigVM_Task, 'duration_secs': 0.263895} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.593736] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c/7c6aad92-6e91-48fc-89ae-5ee4c89f449c.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.593736] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 50 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 928.675037] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updating instance_info_cache with network_info: [{"id": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "address": "fa:16:3e:d2:5d:13", "network": {"id": "39bcc058-c5eb-42ba-85a2-2751b4f5000c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-691533565-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "819c8a7ff0aa4d7186bd859e4b56d16e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapddb06f4c-13", "ovs_interfaceid": "ddb06f4c-13ed-4322-b1e8-f4022b32e4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.780309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 
tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.790982] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241005, 'name': ReconfigVM_Task, 'duration_secs': 0.319723} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.791961] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af/070bc0cc-ff77-48b8-bd08-f17fe69e25af.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.793118] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'disk_bus': None, 'boot_index': 0, 'device_name': '/dev/sda', 'encryption_format': None, 'device_type': 'disk', 'encrypted': False, 'size': 0, 'guest_format': None, 'encryption_secret_uuid': None, 'encryption_options': None, 'image_id': 'e07a6c11-ab12-4187-81fc-1a28a9d1e65d'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267683', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'name': 'volume-b04c905b-4035-4be9-9960-21b687a5e2a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '070bc0cc-ff77-48b8-bd08-f17fe69e25af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'serial': 'b04c905b-4035-4be9-9960-21b687a5e2a9'}, 'disk_bus': None, 'boot_index': None, 'attachment_id': 'fab4d4a2-10dd-45df-9077-5e46841d7045', 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'device_type': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=61898) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 928.793333] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Volume attach. 
Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 928.793528] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267683', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'name': 'volume-b04c905b-4035-4be9-9960-21b687a5e2a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '070bc0cc-ff77-48b8-bd08-f17fe69e25af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'serial': 'b04c905b-4035-4be9-9960-21b687a5e2a9'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 928.794344] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d659d345-261e-4f54-a591-d7221704fb68 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.812637] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201fecb9-8b58-43b6-8b6c-2c42c283adee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.841484] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] volume-b04c905b-4035-4be9-9960-21b687a5e2a9/volume-b04c905b-4035-4be9-9960-21b687a5e2a9.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.849531] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70d722cb-1a6b-408a-8c84-235224e0fc22 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.869626] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241006, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.871029] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 928.871029] env[61898]: value = "task-1241007" [ 928.871029] env[61898]: _type = "Task" [ 928.871029] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.879824] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241007, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.104895] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148b9c9a-212c-472e-ba7c-7ddce1544d0a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.127058] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e51a5f10-11f2-4161-b937-1ab7073ccf91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.131900] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97993e57-729b-48c0-9f84-baacee7678b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.149976] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 67 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 929.158687] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd36575-327f-45f4-bdd3-d056ada5fecc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.188829] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-1aa03975-f18f-4e64-836e-e991b73ee9d5" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.189097] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 929.189364] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.190216] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b1cd3c-ec89-440a-a8f6-e8fbc3039d9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.192822] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.193079] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.193692] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.193911] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.194123] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.194265] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 929.194415] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 929.200188] env[61898]: DEBUG nova.network.neutron [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Updated VIF entry in instance network info cache for port 1055fd09-3d12-49b4-bac7-a4b6b9208ca0. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 929.200582] env[61898]: DEBUG nova.network.neutron [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Updating instance_info_cache with network_info: [{"id": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "address": "fa:16:3e:85:20:45", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1055fd09-3d", "ovs_interfaceid": "1055fd09-3d12-49b4-bac7-a4b6b9208ca0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.203513] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493bf30f-ffb3-4db8-9cf3-2e9fd87924e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.221803] env[61898]: DEBUG nova.compute.provider_tree [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 
tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.342859] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241006, 'name': CreateVM_Task, 'duration_secs': 0.651692} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.344920] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 929.344920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.344920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.344920] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.344920] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbe12011-be50-4523-b020-9ff04879fc5e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.349176] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 929.349176] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d9d6b9-aa12-2720-7215-7c27c99b960e" [ 929.349176] env[61898]: _type = "Task" [ 929.349176] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.357722] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d9d6b9-aa12-2720-7215-7c27c99b960e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.379801] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241007, 'name': ReconfigVM_Task, 'duration_secs': 0.361781} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.380490] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfigured VM instance instance-00000045 to attach disk [datastore1] volume-b04c905b-4035-4be9-9960-21b687a5e2a9/volume-b04c905b-4035-4be9-9960-21b687a5e2a9.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.386328] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79f6854e-2c82-4976-88be-71883c625b1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.402695] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 929.402695] env[61898]: value = "task-1241008" [ 929.402695] env[61898]: _type = "Task" [ 929.402695] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.410921] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241008, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.697854] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.703573] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] Releasing lock "refresh_cache-47208ebd-8407-4d00-8378-adb0a4a21c2a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.703830] env[61898]: DEBUG nova.compute.manager [req-9fe7895f-3455-4a67-9625-e98cbeb8a085 req-3c8fe4a6-b6a4-49b8-a251-e07f840bea63 service nova] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Received event network-vif-deleted-f8ab1155-cd50-4b2d-859b-a9e7bdc41b1f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 929.725099] env[61898]: DEBUG nova.scheduler.client.report [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 929.752149] env[61898]: DEBUG 
nova.network.neutron [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Port 9f33f2c4-4626-4230-90ea-e91c5f0da486 binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 929.861024] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d9d6b9-aa12-2720-7215-7c27c99b960e, 'name': SearchDatastore_Task, 'duration_secs': 0.010359} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.861360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.861615] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.861854] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.862009] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.862226] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.862496] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-596ab9f0-aabd-479c-af03-083ef083d10d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.870706] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.870889] env[61898]: DEBUG 
nova.virt.vmwareapi.vmops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.871628] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da1a7eb-b4bf-437a-92fb-be5710bf2139 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.876820] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 929.876820] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2e8fc-a1fc-245b-e160-8730334546b7" [ 929.876820] env[61898]: _type = "Task" [ 929.876820] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.884511] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2e8fc-a1fc-245b-e160-8730334546b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.911704] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241008, 'name': ReconfigVM_Task, 'duration_secs': 0.150874} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.912028] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267683', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'name': 'volume-b04c905b-4035-4be9-9960-21b687a5e2a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '070bc0cc-ff77-48b8-bd08-f17fe69e25af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'serial': 'b04c905b-4035-4be9-9960-21b687a5e2a9'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 929.912778] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17cb71a2-b2d3-45aa-80a6-bad514f1229c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.919734] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 929.919734] env[61898]: value = "task-1241009" [ 929.919734] env[61898]: _type = "Task" [ 929.919734] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.928087] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241009, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.230903] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.230903] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 930.234077] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.145s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.234989] env[61898]: INFO nova.compute.claims [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.387581] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d2e8fc-a1fc-245b-e160-8730334546b7, 'name': SearchDatastore_Task, 'duration_secs': 0.008139} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.388411] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b5fbdc5-11c2-4756-9f74-bdff7b2d9e2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.393833] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 930.393833] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5231890e-5c08-3032-ef56-65d3dbffa274" [ 930.393833] env[61898]: _type = "Task" [ 930.393833] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.403120] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5231890e-5c08-3032-ef56-65d3dbffa274, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.429333] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241009, 'name': Rename_Task, 'duration_secs': 0.153171} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.429615] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.429861] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-517ab78b-5e5d-470c-b675-2d85b9d9a7d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.436084] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 930.436084] env[61898]: value = "task-1241010" [ 930.436084] env[61898]: _type = "Task" [ 930.436084] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.450899] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241010, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.683602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.683841] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.739187] env[61898]: DEBUG nova.compute.utils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 930.742367] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 930.742533] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.771961] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.772912] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.772912] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.788626] env[61898]: DEBUG nova.policy [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 
tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c909f4306477d8fc741ab3aac9d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e8b71885c83418fb13e216f804ffeeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 930.907667] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5231890e-5c08-3032-ef56-65d3dbffa274, 'name': SearchDatastore_Task, 'duration_secs': 0.00899} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.907667] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.907667] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 47208ebd-8407-4d00-8378-adb0a4a21c2a/47208ebd-8407-4d00-8378-adb0a4a21c2a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.907667] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d45271a-6e4a-417a-8d37-cf06f109c901 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.912707] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 930.912707] env[61898]: value = "task-1241011" [ 930.912707] env[61898]: _type = "Task" [ 930.912707] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.921749] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241011, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.946279] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241010, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.100269] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Successfully created port: e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.186254] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 931.243758] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 931.427378] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241011, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.450528] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241010, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.580020] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb93ba0-2df8-4550-aa99-ffe257b7216f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.588183] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23df01b-3bb7-4f0a-91fd-d5a8424433a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.623956] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5423525-9e6f-40ba-934e-5c98180fc644 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.633670] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdad4db-34f7-4f9c-b1bb-69ba7dad428c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.647767] env[61898]: DEBUG nova.compute.provider_tree [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.708182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.829105] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.829492] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.829587] env[61898]: DEBUG nova.network.neutron [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.924405] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241011, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526652} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.924706] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 47208ebd-8407-4d00-8378-adb0a4a21c2a/47208ebd-8407-4d00-8378-adb0a4a21c2a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.924924] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.925215] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26e586a3-e3e1-4537-ad50-03d57bc6ec2e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.931720] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 931.931720] env[61898]: value = "task-1241012" [ 931.931720] env[61898]: _type = "Task" [ 931.931720] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.942094] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.950071] env[61898]: DEBUG oslo_vmware.api [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241010, 'name': PowerOnVM_Task, 'duration_secs': 1.244861} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.950392] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.100427] env[61898]: DEBUG nova.compute.manager [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 932.101353] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a6b6dd-1109-49c7-a2f8-c275d4ad2f47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.150547] env[61898]: DEBUG nova.scheduler.client.report [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 932.255095] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 932.276657] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1576e8899d1f9b5c49c4604b9c1494fd',container_format='bare',created_at=2024-10-10T12:01:05Z,direct_url=,disk_format='vmdk',id=7d207fb3-39a4-452d-a133-40f06b6cc713,min_disk=1,min_ram=0,name='tempest-test-snap-2084074098',owner='7e8b71885c83418fb13e216f804ffeeb',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-10T12:01:20Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 932.276922] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 932.277097] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.277329] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 932.277431] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.277579] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 932.277779] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 932.277942] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 932.278393] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Got 1 possible 
topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 932.278495] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 932.278659] env[61898]: DEBUG nova.virt.hardware [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 932.279555] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c68a017-6371-4481-9aa4-f50336330db3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.287268] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d105815a-db67-4bdc-ae43-4f6f1fcc0423 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.442021] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065919} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.442406] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.443650] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b57ec51-acc7-4f0e-a960-00eb7eeaf21c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.465346] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 47208ebd-8407-4d00-8378-adb0a4a21c2a/47208ebd-8407-4d00-8378-adb0a4a21c2a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.465565] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66c5a400-9657-4484-b66e-a7c8c6208b30 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.486185] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 932.486185] env[61898]: value = "task-1241013" [ 932.486185] env[61898]: _type = "Task" [ 932.486185] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.493712] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.515273] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.515526] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.515735] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.515945] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.516165] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.518469] env[61898]: INFO nova.compute.manager [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Terminating instance [ 932.619536] env[61898]: DEBUG oslo_concurrency.lockutils [None req-73363544-2514-4c42-95bc-ee9a615b572e tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 40.754s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.655646] env[61898]: 
DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.656212] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 932.659097] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 11.480s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.698375] env[61898]: DEBUG nova.network.neutron [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.996845] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241013, 'name': ReconfigVM_Task, 'duration_secs': 0.274758} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.997308] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 47208ebd-8407-4d00-8378-adb0a4a21c2a/47208ebd-8407-4d00-8378-adb0a4a21c2a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.997953] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21be92c6-e73a-4f39-bb10-d1efb79a60fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.005310] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 933.005310] env[61898]: value = "task-1241014" [ 933.005310] env[61898]: _type = "Task" [ 933.005310] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.010201] env[61898]: DEBUG nova.compute.manager [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Received event network-vif-plugged-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 933.010363] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] Acquiring lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.010571] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 933.010740] env[61898]: DEBUG oslo_concurrency.lockutils [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.010988] env[61898]: DEBUG nova.compute.manager [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] No waiting events found dispatching network-vif-plugged-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.011189] env[61898]: WARNING nova.compute.manager [req-b9043c6f-31b5-4382-a0f1-a773744961c4 req-1719dac5-9139-4cf1-ab03-95cdc53c4108 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Received unexpected event 
network-vif-plugged-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 for instance with vm_state building and task_state spawning. [ 933.017144] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241014, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.021820] env[61898]: DEBUG nova.compute.manager [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 933.022037] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 933.023025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1523f18a-bdef-4f69-a036-1c79f36bf3c7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.029638] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.029831] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b6d94b3-2f96-4e7e-9b1c-b3c955bd7aa3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.035979] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 933.035979] env[61898]: value = "task-1241015" [ 933.035979] env[61898]: _type = "Task" [ 933.035979] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.043729] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1241015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.162398] env[61898]: DEBUG nova.compute.utils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 933.166802] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 933.167959] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 933.201075] env[61898]: DEBUG oslo_concurrency.lockutils [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.211458] env[61898]: DEBUG nova.policy [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 933.423898] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec5fd5f-39d2-40fa-8107-481de58961ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.432327] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d2a2fb-7a72-44c7-acac-580976024a08 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.463902] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1893d3-521c-492e-9da2-8d5d741affad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.471653] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec318fc-9b18-47cc-8736-08c84e03b5df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.487467] env[61898]: DEBUG nova.compute.provider_tree [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.490056] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Successfully updated port: e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.514730] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241014, 'name': Rename_Task, 'duration_secs': 0.148614} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.515341] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 933.515341] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f59fbc03-e209-43f0-847e-cf5ee6ef94c3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.522780] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 933.522780] env[61898]: value = "task-1241016" [ 933.522780] env[61898]: _type = "Task" [ 933.522780] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.534693] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241016, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.545197] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1241015, 'name': PowerOffVM_Task, 'duration_secs': 0.192323} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.545456] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.545747] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 933.545876] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0db1e24-2878-441e-9fb5-0fed8970c579 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.609069] env[61898]: DEBUG nova.compute.manager [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Received event network-changed-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 933.609308] env[61898]: DEBUG nova.compute.manager [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Refreshing instance network info cache due to event network-changed-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 933.609527] env[61898]: DEBUG oslo_concurrency.lockutils [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] Acquiring lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.609695] env[61898]: DEBUG oslo_concurrency.lockutils [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] Acquired lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.609827] env[61898]: DEBUG nova.network.neutron [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Refreshing network info cache for port e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 933.669052] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 933.706421] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Successfully created port: 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.733045] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784a4c96-6ca5-4a70-86f7-65265051063b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.753122] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb05dc2-0f70-4a7e-a3ec-19b19e78088a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.760031] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 83 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 933.994958] env[61898]: DEBUG nova.scheduler.client.report [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 933.997964] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.033788] env[61898]: DEBUG oslo_vmware.api [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241016, 'name': PowerOnVM_Task, 'duration_secs': 0.472233} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.034310] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 934.034772] env[61898]: INFO nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Took 8.53 seconds to spawn the instance on the hypervisor. [ 934.035171] env[61898]: DEBUG nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 934.036140] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d40e7b-3d7b-41e6-b9d2-674a1dd906ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.268020] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-65bdc501-a42d-4ced-a248-4278776e281f tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance '7c6aad92-6e91-48fc-89ae-5ee4c89f449c' progress to 100 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 934.339781] env[61898]: DEBUG nova.network.neutron [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.494559] env[61898]: DEBUG nova.network.neutron [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.555727] env[61898]: INFO nova.compute.manager [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Took 26.25 seconds to build instance. [ 934.676736] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 934.712249] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 934.712249] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 934.712249] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.712249] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 934.712249] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.712509] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 934.712605] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 934.712768] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 934.712986] env[61898]: DEBUG 
nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 934.713107] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 934.713302] env[61898]: DEBUG nova.virt.hardware [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 934.714251] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced9954e-74ad-43d3-93d4-85d429bdb9ad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.723022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611576a7-44a2-4667-b6b0-cbaaaf5dc3eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.996198] env[61898]: DEBUG oslo_concurrency.lockutils [req-da1b3885-9a04-4a83-afda-7325241a3157 req-cc58841b-2700-438f-a669-d3e7c2a3759f service nova] Releasing lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.996555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.996716] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.005439] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.346s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.008730] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.871s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.010183] env[61898]: INFO nova.compute.claims [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 
tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.057492] env[61898]: DEBUG oslo_concurrency.lockutils [None req-24a4e084-f2ac-4139-8458-8c1b67e8cba6 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.767s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.289051] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.289313] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.289522] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Deleting the datastore file [datastore1] 11ca5129-0dc3-44b3-8f7b-215c93dac764 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.290169] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4cfe941-b250-46c4-98ba-57b2230e3373 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.296624] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for the task: (returnval){ [ 935.296624] env[61898]: value = "task-1241018" [ 935.296624] env[61898]: _type = "Task" [ 935.296624] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.304454] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1241018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.535359] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6332caee-5229-4e17-95b1-f80c24c85ee1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.557747] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.565965] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Suspending the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 935.565965] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-38cc0eae-1174-4d27-961a-d74d20f0bc06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.575153] env[61898]: DEBUG oslo_vmware.api [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 935.575153] env[61898]: value = "task-1241019" [ 935.575153] env[61898]: _type = "Task" [ 935.575153] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.592129] env[61898]: DEBUG oslo_vmware.api [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241019, 'name': SuspendVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.593603] env[61898]: INFO nova.scheduler.client.report [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocation for migration 2e52a2f3-ee15-4f80-b956-7e2560a4a289 [ 935.808996] env[61898]: DEBUG oslo_vmware.api [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Task: {'id': task-1241018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280704} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.809959] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.810204] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.810393] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.810562] env[61898]: INFO nova.compute.manager [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Took 2.79 seconds to destroy the instance on the hypervisor. [ 935.811268] env[61898]: DEBUG oslo.service.loopingcall [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.811412] env[61898]: DEBUG nova.compute.manager [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 935.811547] env[61898]: DEBUG nova.network.neutron [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.927066] env[61898]: DEBUG nova.compute.manager [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-vif-plugged-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 935.927308] env[61898]: DEBUG oslo_concurrency.lockutils [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 935.927522] env[61898]: DEBUG oslo_concurrency.lockutils [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.927692] env[61898]: DEBUG oslo_concurrency.lockutils [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.927862] env[61898]: DEBUG nova.compute.manager [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] No waiting events found dispatching network-vif-plugged-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 935.928525] env[61898]: WARNING nova.compute.manager [req-46896061-94a4-48c1-9750-c324f9bc9962 req-2a54e03f-ac27-4e22-bbf4-53ffe6d89899 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received unexpected event network-vif-plugged-4bed7107-cc7d-431f-a835-84a51f188455 for instance with vm_state building and task_state spawning. 
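The records just above show nova-compute handling an external "network-vif-plugged" event: it acquires the per-instance "<uuid>-events" lock through oslo.concurrency, tries to pop a matching waiter, and logs a WARNING when nothing is waiting because the instance is still in vm_state building / task_state spawning. The following is a minimal, self-contained sketch of that lock-and-pop pattern only; the helper names (_events, prepare_for_event, external_instance_event) and the simplified dispatch are assumptions for illustration, not Nova's actual nova.compute.manager implementation.

# Illustrative sketch, assuming a dict of threading.Event waiters keyed by
# (instance_uuid, event_name); only lockutils.lock() and threading.Event are
# real APIs here, the rest is hypothetical scaffolding.
import threading
from oslo_concurrency import lockutils

_events = {}  # {(instance_uuid, event_name): threading.Event}

def prepare_for_event(instance_uuid, event_name):
    # Register interest before triggering the external action (e.g. port binding),
    # under the same "<uuid>-events" lock name seen in the log records above.
    waiter = threading.Event()
    with lockutils.lock(f"{instance_uuid}-events"):
        _events[(instance_uuid, event_name)] = waiter
    return waiter

def _pop_instance_event(instance_uuid, event_name):
    # Called when Neutron reports the event; returns the waiter or None.
    with lockutils.lock(f"{instance_uuid}-events"):
        return _events.pop((instance_uuid, event_name), None)

def external_instance_event(instance_uuid, event_name):
    waiter = _pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING above: the event arrived while nothing
        # was waiting for it (instance still building/spawning).
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()  # unblock whoever called waiter.wait(timeout=...) after prepare_for_event()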
[ 936.046301] env[61898]: DEBUG nova.network.neutron [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Updating instance_info_cache with network_info: [{"id": "e508f4ea-8f2b-492f-b0e2-3eb68afaaa15", "address": "fa:16:3e:f1:cf:07", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape508f4ea-8f", "ovs_interfaceid": "e508f4ea-8f2b-492f-b0e2-3eb68afaaa15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.055517] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.057146] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.002s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.057408] env[61898]: DEBUG nova.compute.manager [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Going to confirm migration 2 {{(pid=61898) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 936.060701] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Successfully updated port: 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 936.088356] env[61898]: DEBUG oslo_vmware.api [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241019, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.101479] env[61898]: DEBUG oslo_concurrency.lockutils [None req-883cd8c2-cc33-4277-9b1c-fe49ce849dac tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.066s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.396716] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72ffe36-f1de-4c11-bec7-aa50c3766edf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.405474] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00fea9c-f3c7-4a3a-baf9-13bb431fad5b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.447147] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6535e23-3e67-4b90-9d13-d5d656c08551 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.453311] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833d7a3d-a05f-4de5-8cee-4a1cbd43dc03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.466439] env[61898]: DEBUG nova.compute.provider_tree [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.563336] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "refresh_cache-5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 936.563756] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance network_info: |[{"id": "e508f4ea-8f2b-492f-b0e2-3eb68afaaa15", "address": "fa:16:3e:f1:cf:07", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tape508f4ea-8f", "ovs_interfaceid": "e508f4ea-8f2b-492f-b0e2-3eb68afaaa15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 936.564182] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:cf:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e508f4ea-8f2b-492f-b0e2-3eb68afaaa15', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 936.572205] env[61898]: DEBUG oslo.service.loopingcall [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.575070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.575263] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.575443] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.576481] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 936.578677] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87ed6cb7-fca6-4b18-8582-babd1435c504 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.603704] env[61898]: DEBUG oslo_vmware.api [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241019, 'name': SuspendVM_Task, 'duration_secs': 0.654446} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.604961] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Suspended the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 936.605237] env[61898]: DEBUG nova.compute.manager [None req-f690330f-c3c7-47a5-b197-a056d01d5f84 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 936.605508] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 936.605508] env[61898]: value = "task-1241020" [ 936.605508] env[61898]: _type = "Task" [ 936.605508] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.606408] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45772a55-e2ba-4404-9287-c3d881319082 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.620992] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241020, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.677677] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.677875] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.678067] env[61898]: DEBUG nova.network.neutron [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.678266] env[61898]: DEBUG nova.objects.instance [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'info_cache' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.969128] env[61898]: DEBUG nova.scheduler.client.report [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 937.125922] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241020, 'name': CreateVM_Task, 'duration_secs': 0.411608} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.127065] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.130284] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 937.131093] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.131308] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.131669] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 937.132387] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e593f19b-9657-4565-be26-43d1fd9ec725 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.137296] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 937.137296] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bffa98-1784-d4d9-2302-3528e2adf5b3" [ 937.137296] env[61898]: _type = "Task" [ 937.137296] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.146035] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bffa98-1784-d4d9-2302-3528e2adf5b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.235805] env[61898]: DEBUG nova.network.neutron [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.399875] env[61898]: DEBUG nova.network.neutron [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.475370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.475915] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 937.478845] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.161s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.480392] env[61898]: DEBUG nova.objects.instance [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lazy-loading 'resources' on Instance uuid 9b7b9962-fda1-46af-9ecc-ea5b352d5193 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.647699] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.647975] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Processing image 7d207fb3-39a4-452d-a133-40f06b6cc713 {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 937.648223] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.648371] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.648543] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 937.648788] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a318820-1928-4b4f-bb1c-8d2e4c3ede44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.656989] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 937.657204] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 
tempest-ImagesTestJSON-1589173607-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 937.657899] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d36262de-d1e5-4a03-add1-a6e6bad0f18b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.663526] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 937.663526] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cbf3c9-e778-8e22-2442-8a5aa34c1ec9" [ 937.663526] env[61898]: _type = "Task" [ 937.663526] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.674227] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cbf3c9-e778-8e22-2442-8a5aa34c1ec9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.742181] env[61898]: INFO nova.compute.manager [-] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Took 1.93 seconds to deallocate network for instance. [ 937.901442] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.901786] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Instance network_info: |[{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 937.902245] env[61898]: DEBUG nova.virt.vmwareapi.vmops 
[None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:b0:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bed7107-cc7d-431f-a835-84a51f188455', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.911502] env[61898]: DEBUG oslo.service.loopingcall [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.912067] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 937.912314] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-829c06ae-1412-457a-9867-0703f5688673 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.934911] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.934911] env[61898]: value = "task-1241021" [ 937.934911] env[61898]: _type = "Task" [ 937.934911] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.943754] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241021, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.983704] env[61898]: DEBUG nova.compute.utils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.988504] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 937.988734] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.120082] env[61898]: DEBUG nova.policy [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 938.142809] env[61898]: DEBUG nova.network.neutron [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.159408] env[61898]: DEBUG nova.compute.manager [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-changed-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 938.159618] env[61898]: DEBUG nova.compute.manager [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing instance network info cache due to event network-changed-4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 938.159908] env[61898]: DEBUG oslo_concurrency.lockutils [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.160223] env[61898]: DEBUG oslo_concurrency.lockutils [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.160315] env[61898]: DEBUG nova.network.neutron [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.178721] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 938.178996] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Fetch image to [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c/OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 938.179232] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Downloading stream optimized image 7d207fb3-39a4-452d-a133-40f06b6cc713 to [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c/OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c.vmdk on the data store datastore2 as vApp {{(pid=61898) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 938.179417] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Downloading image file data 7d207fb3-39a4-452d-a133-40f06b6cc713 to the ESX as VM named 'OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c' {{(pid=61898) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 938.220565] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.220809] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 
tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.221026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.221229] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.221394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.226154] env[61898]: INFO nova.compute.manager [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Terminating instance [ 938.244376] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.244700] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.244967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.245236] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 
tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.245435] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.251716] env[61898]: INFO nova.compute.manager [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Terminating instance [ 938.254165] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.285792] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 938.285792] env[61898]: value = "resgroup-9" [ 938.285792] env[61898]: _type = "ResourcePool" [ 938.285792] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 938.286159] env[61898]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-dbda294e-c7b5-4a7b-bb53-a40fb8d731ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.306714] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602d4522-ea05-4243-9a0a-c5a972e3190e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.310773] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease: (returnval){ [ 938.310773] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 938.310773] env[61898]: _type = "HttpNfcLease" [ 938.310773] env[61898]: } obtained for vApp import into resource pool (val){ [ 938.310773] env[61898]: value = "resgroup-9" [ 938.310773] env[61898]: _type = "ResourcePool" [ 938.310773] env[61898]: }. 
{{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 938.311489] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the lease: (returnval){ [ 938.311489] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 938.311489] env[61898]: _type = "HttpNfcLease" [ 938.311489] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 938.318837] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe7b944-c7d4-4bfd-8770-7cfed4e4df49 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.322589] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.322589] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 938.322589] env[61898]: _type = "HttpNfcLease" [ 938.322589] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 938.351450] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd89079-862d-441f-b542-119991d5cc16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.359059] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1025798-7101-45d1-938f-692ff1c99d50 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.372956] env[61898]: DEBUG nova.compute.provider_tree [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.445391] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241021, 'name': CreateVM_Task, 'duration_secs': 0.390262} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.445599] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.446355] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.446948] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.446948] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.447714] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95391393-719f-4093-bd55-731b17a460c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.453153] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 938.453153] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c2c566-9a35-2845-5576-1a6576059dc4" [ 938.453153] env[61898]: _type = "Task" [ 938.453153] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.463453] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c2c566-9a35-2845-5576-1a6576059dc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.489265] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 938.645980] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.646138] env[61898]: DEBUG nova.objects.instance [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'migration_context' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.662358] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Successfully created port: 0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.732289] env[61898]: DEBUG nova.compute.manager [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 938.732514] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.733407] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34ca335-8998-4da5-94be-ba2c817d36ec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.741076] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.741324] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf6d2f73-b00b-44a8-8d33-9f1731d306b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.746780] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 938.746780] env[61898]: value = "task-1241023" [ 938.746780] env[61898]: _type = "Task" [ 938.746780] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.755776] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241023, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.756865] env[61898]: DEBUG nova.compute.manager [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 938.757076] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.757855] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013553a6-1e97-4047-b1c1-7d8c7b8c99f6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.765010] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.765282] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3c54f5c-b767-413e-b24e-ae3555b07055 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.819956] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 938.819956] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 938.819956] env[61898]: _type = "HttpNfcLease" [ 938.819956] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 938.846208] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.846466] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.846706] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleting the datastore file [datastore1] 47208ebd-8407-4d00-8378-adb0a4a21c2a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.846967] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec30e699-731b-4e08-8309-2f02d616df25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.855720] env[61898]: DEBUG oslo_vmware.api [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 938.855720] env[61898]: value = "task-1241025" [ 938.855720] env[61898]: _type = "Task" [ 938.855720] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.865146] env[61898]: DEBUG oslo_vmware.api [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241025, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.876463] env[61898]: DEBUG nova.scheduler.client.report [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 938.924809] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.925101] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.963865] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c2c566-9a35-2845-5576-1a6576059dc4, 'name': SearchDatastore_Task, 'duration_secs': 0.009727} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.964227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.964515] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.964771] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.964966] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.965185] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.965449] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b10667bd-dac0-4179-a29a-ccab69cafe42 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.972643] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.972827] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.973884] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-788efa2c-6225-4327-a1c7-b3bdc638dbe9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.979375] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 938.979375] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb30a3-2488-04c7-f0f6-5287eb19e940" [ 938.979375] env[61898]: _type = "Task" [ 938.979375] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.988082] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb30a3-2488-04c7-f0f6-5287eb19e940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.149620] env[61898]: DEBUG nova.objects.base [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Object Instance<7c6aad92-6e91-48fc-89ae-5ee4c89f449c> lazy-loaded attributes: info_cache,migration_context {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 939.150694] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21672f7-6445-4e51-ac88-bea880b064e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.176414] env[61898]: DEBUG nova.network.neutron [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updated VIF entry in instance network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 939.176414] env[61898]: DEBUG nova.network.neutron [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.176414] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f3717d6-e8ac-4621-8f48-a77e99ec0616 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.182722] env[61898]: DEBUG oslo_vmware.api [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 939.182722] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a3f824-7d39-20fc-4397-527d0d14aa82" [ 939.182722] env[61898]: _type = "Task" [ 939.182722] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.192776] env[61898]: DEBUG oslo_vmware.api [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a3f824-7d39-20fc-4397-527d0d14aa82, 'name': SearchDatastore_Task, 'duration_secs': 0.007886} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.193079] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.258160] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241023, 'name': PowerOffVM_Task, 'duration_secs': 0.335984} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.258511] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.258714] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.259011] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7b7c235-b506-4656-aabd-7da49a61bb20 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.321563] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 939.321563] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 939.321563] env[61898]: _type = "HttpNfcLease" [ 939.321563] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 939.323033] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 939.323033] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52656a5d-eb98-7ba0-5e6d-817255a2dff1" [ 939.323033] env[61898]: _type = "HttpNfcLease" [ 939.323033] env[61898]: }. 
{{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 939.323652] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.323876] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.324331] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore2] 5323b250-fad8-4d71-81ed-c5e5eeb8aeab {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.324871] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1082d286-7631-484a-b4e3-c5b5d7fe2085 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.328398] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93115212-d4e0-403e-ace9-c4fa015f3759 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.334763] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 939.334947] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 939.338018] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 939.338018] env[61898]: value = "task-1241027" [ 939.338018] env[61898]: _type = "Task" [ 939.338018] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.398584] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.919s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.407074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.752s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.407442] env[61898]: DEBUG nova.objects.instance [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lazy-loading 'resources' on Instance uuid aab10d8f-0d25-4351-a627-7222be63895e {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.417331] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.418536] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-581af289-5607-44fe-ad97-5de249633118 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.426336] env[61898]: DEBUG oslo_vmware.api [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241142} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.427115] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.427529] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.427786] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.428127] env[61898]: INFO nova.compute.manager [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Took 0.67 seconds to destroy the instance on the hypervisor. [ 939.428454] env[61898]: DEBUG oslo.service.loopingcall [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.430167] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 939.434180] env[61898]: DEBUG nova.compute.manager [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 939.434368] env[61898]: DEBUG nova.network.neutron [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.437934] env[61898]: INFO nova.scheduler.client.report [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Deleted allocations for instance 9b7b9962-fda1-46af-9ecc-ea5b352d5193 [ 939.490096] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb30a3-2488-04c7-f0f6-5287eb19e940, 'name': SearchDatastore_Task, 'duration_secs': 0.008532} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.491012] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-666b146d-f11f-4136-a87d-6e4e50672ed9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.497089] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 939.497089] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521fc35c-902f-8dee-8c83-a8b5b3e00385" [ 939.497089] env[61898]: _type = "Task" [ 939.497089] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.498324] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 939.513229] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521fc35c-902f-8dee-8c83-a8b5b3e00385, 'name': SearchDatastore_Task, 'duration_secs': 0.011366} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.513509] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.513771] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 4522f4ef-c8f6-4fe1-acd5-796f87f22839/4522f4ef-c8f6-4fe1-acd5-796f87f22839.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.514059] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6877a77-6c23-4f5b-9970-8fd44e61282d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.520959] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 939.520959] env[61898]: value = "task-1241028" [ 939.520959] env[61898]: _type = "Task" [ 939.520959] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.530927] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.533411] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 939.533639] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 939.533796] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.533991] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 939.534153] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.534298] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 939.534503] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 939.534661] env[61898]: DEBUG nova.virt.hardware [None 
req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 939.534826] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 939.534987] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 939.535229] env[61898]: DEBUG nova.virt.hardware [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 939.536421] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd9b870-dfec-4977-9aa6-34ea304cfa9c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.544437] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d917fe17-41b9-4837-ab04-75b70005163a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.680129] env[61898]: DEBUG oslo_concurrency.lockutils [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.680424] env[61898]: DEBUG nova.compute.manager [req-510ab46f-ae63-40e8-afea-aba0d71bf853 req-9ee65ff9-c326-45f2-be94-bf30270ee849 service nova] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Received event network-vif-deleted-0d791731-b395-4858-b0b0-86de8a660e18 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 939.857684] env[61898]: DEBUG oslo_vmware.api [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198997} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.858085] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.858225] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.858399] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.858626] env[61898]: INFO nova.compute.manager [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Took 1.13 seconds to destroy the instance on the hypervisor. [ 939.858920] env[61898]: DEBUG oslo.service.loopingcall [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 939.861705] env[61898]: DEBUG nova.compute.manager [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 939.861821] env[61898]: DEBUG nova.network.neutron [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.952658] env[61898]: DEBUG oslo_concurrency.lockutils [None req-380edc94-6939-42d9-9d3f-63aa2f650f66 tempest-ServerRescueTestJSON-2021790224 tempest-ServerRescueTestJSON-2021790224-project-member] Lock "9b7b9962-fda1-46af-9ecc-ea5b352d5193" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.449s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.970313] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.039953] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241028, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.242330] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e58456a-c802-4159-87d5-29b8dba920a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.252888] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59e33b4-a15f-4886-bfac-1f80de855da3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.292181] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284ab541-2f01-43e4-8ed2-1620aa3f6240 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.302331] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1b7b12-c991-4e08-ab9f-e0b376025cd1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.316278] env[61898]: DEBUG nova.compute.provider_tree [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.383976] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 940.384257] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 940.385254] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a5c2ea-f9e1-4bac-868c-971f341b78ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.393217] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 940.393217] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 940.393474] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-81782198-a879-4bbc-8e1f-3d64fa207460 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.535258] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610347} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.535551] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 4522f4ef-c8f6-4fe1-acd5-796f87f22839/4522f4ef-c8f6-4fe1-acd5-796f87f22839.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.535775] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.536044] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-60be1346-b618-4449-871f-1793c6a3a84c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.543072] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 940.543072] env[61898]: value = "task-1241029" [ 940.543072] env[61898]: _type = "Task" [ 940.543072] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.550095] env[61898]: DEBUG nova.compute.manager [req-829a8f61-65a5-4a61-b3a1-4e9a437f84d4 req-1b76eaf5-2cf7-481d-985c-6e6098088f4a service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Received event network-vif-deleted-1055fd09-3d12-49b4-bac7-a4b6b9208ca0 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 940.550322] env[61898]: INFO nova.compute.manager [req-829a8f61-65a5-4a61-b3a1-4e9a437f84d4 req-1b76eaf5-2cf7-481d-985c-6e6098088f4a service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Neutron deleted interface 1055fd09-3d12-49b4-bac7-a4b6b9208ca0; detaching it from the instance and deleting it from the info cache [ 940.550995] env[61898]: DEBUG nova.network.neutron [req-829a8f61-65a5-4a61-b3a1-4e9a437f84d4 req-1b76eaf5-2cf7-481d-985c-6e6098088f4a service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.558650] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241029, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.794372] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Successfully updated port: 0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.819280] env[61898]: DEBUG nova.scheduler.client.report [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 940.845559] env[61898]: DEBUG nova.network.neutron [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.982891] env[61898]: DEBUG nova.network.neutron [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.005303] env[61898]: DEBUG oslo_vmware.rw_handles [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52200289-73e6-de01-0381-d97d0b67c6e8/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 941.005442] env[61898]: INFO nova.virt.vmwareapi.images [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Downloaded image file data 7d207fb3-39a4-452d-a133-40f06b6cc713 [ 941.007287] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908e6f8a-6827-4807-a87f-e40f2f72b5d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.029243] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8818798e-2695-4b73-996e-98995b08c005 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.054223] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.187984} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.054481] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-516120bd-f812-4fec-a3e2-a601a791bbd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.056384] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.057611] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760d12ec-39d0-42ea-a72b-5c57b375d9a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.084762] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 4522f4ef-c8f6-4fe1-acd5-796f87f22839/4522f4ef-c8f6-4fe1-acd5-796f87f22839.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.086987] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd5f1c60-8c03-4ee2-bc4d-e5e9064501db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.105894] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b3d085-fa94-4477-8097-24fae45f9280 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.120735] env[61898]: INFO nova.virt.vmwareapi.images [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] The 
imported VM was unregistered [ 941.124242] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 941.124480] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713 {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.126098] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0349b24d-02ea-4dd9-b09b-1c611eb6f52b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.128043] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 941.128043] env[61898]: value = "task-1241031" [ 941.128043] env[61898]: _type = "Task" [ 941.128043] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.136363] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241031, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.137915] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713 {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.138108] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c/OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c.vmdk to [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk. {{(pid=61898) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 941.138346] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b1e6f8e4-e054-4675-9212-d8983623380d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.149539] env[61898]: DEBUG nova.compute.manager [req-829a8f61-65a5-4a61-b3a1-4e9a437f84d4 req-1b76eaf5-2cf7-481d-985c-6e6098088f4a service nova] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Detach interface failed, port_id=1055fd09-3d12-49b4-bac7-a4b6b9208ca0, reason: Instance 47208ebd-8407-4d00-8378-adb0a4a21c2a could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 941.154222] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 941.154222] env[61898]: value = "task-1241032" [ 941.154222] env[61898]: _type = "Task" [ 941.154222] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.161774] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241032, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.297443] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.297717] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.297871] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.325164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.918s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.327775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.843s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.328848] env[61898]: DEBUG nova.objects.instance [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'pci_requests' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.347782] env[61898]: INFO nova.compute.manager [-] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Took 1.91 seconds to deallocate network for instance. 
[ 941.360320] env[61898]: INFO nova.scheduler.client.report [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted allocations for instance aab10d8f-0d25-4351-a627-7222be63895e [ 941.486629] env[61898]: INFO nova.compute.manager [-] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Took 1.62 seconds to deallocate network for instance. [ 941.649394] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241031, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.669584] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241032, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.832019] env[61898]: DEBUG nova.objects.instance [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'numa_topology' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 941.842673] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.855785] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.871692] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2385643-0826-40c5-b572-54cf2e624ccf tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "aab10d8f-0d25-4351-a627-7222be63895e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.628s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.993844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.003412] env[61898]: DEBUG nova.network.neutron [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Updating instance_info_cache with network_info: [{"id": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "address": "fa:16:3e:e5:05:7f", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e2a07e5-49", "ovs_interfaceid": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.141020] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241031, 'name': ReconfigVM_Task, 'duration_secs': 0.764278} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.141358] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 4522f4ef-c8f6-4fe1-acd5-796f87f22839/4522f4ef-c8f6-4fe1-acd5-796f87f22839.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 942.142061] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70a6343c-b5b9-4b31-8587-5bfb55c8b9b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.149472] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 942.149472] env[61898]: value = "task-1241033" [ 942.149472] env[61898]: _type = "Task" [ 942.149472] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.158592] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241033, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.167407] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241032, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.335666] env[61898]: INFO nova.compute.claims [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.506436] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.506436] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Instance network_info: |[{"id": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "address": "fa:16:3e:e5:05:7f", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e2a07e5-49", "ovs_interfaceid": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 942.507343] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:05:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e2a07e5-49cf-4f3c-8767-1535ccfb295b', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.514615] env[61898]: DEBUG oslo.service.loopingcall [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.515733] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.515986] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-deaa5676-2146-406c-b1f3-6102d21b7343 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.297366] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.297603] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.297805] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.297981] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.298171] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.300893] env[61898]: DEBUG nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Received event network-vif-deleted-dc3e4cf3-8bb3-47aa-83df-7ce64ffec90a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 943.301162] env[61898]: DEBUG nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Received event network-vif-plugged-0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11460}} [ 943.301459] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Acquiring lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.301560] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.301620] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.301781] env[61898]: DEBUG nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] No waiting events found dispatching network-vif-plugged-0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 943.301943] env[61898]: WARNING nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Received unexpected event network-vif-plugged-0e2a07e5-49cf-4f3c-8767-1535ccfb295b for instance with vm_state building and task_state spawning. [ 943.302120] env[61898]: DEBUG nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Received event network-changed-0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 943.302270] env[61898]: DEBUG nova.compute.manager [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Refreshing instance network info cache due to event network-changed-0e2a07e5-49cf-4f3c-8767-1535ccfb295b. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 943.302450] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Acquiring lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.302584] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Acquired lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.302736] env[61898]: DEBUG nova.network.neutron [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Refreshing network info cache for port 0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.307033] env[61898]: INFO nova.compute.manager [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Terminating instance [ 943.319085] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 943.319085] env[61898]: value = "task-1241034" [ 943.319085] env[61898]: _type = "Task" [ 943.319085] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.326229] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241032, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.326970] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241033, 'name': Rename_Task, 'duration_secs': 0.974289} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.331455] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.331553] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-821473c7-b2a7-4cc4-ac38-524282ff7d03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.340171] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241034, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.341667] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 943.341667] env[61898]: value = "task-1241035" [ 943.341667] env[61898]: _type = "Task" [ 943.341667] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.354543] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241035, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.808513] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241032, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.393917} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.809079] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c/OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c.vmdk to [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk. [ 943.809283] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Cleaning up location [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 943.809446] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_3ea4b3b3-2fa2-4590-b491-74fe62a5609c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 943.809800] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d98e87b4-f513-4b1e-959d-b7d8b150918e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.818400] env[61898]: DEBUG nova.compute.manager [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 943.818400] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.818400] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bded54f6-6ea1-4d81-8b10-d38be8cb7a4b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.822047] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 943.822047] env[61898]: value = "task-1241036" [ 943.822047] env[61898]: _type = "Task" [ 943.822047] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.827315] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.830894] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c2d6a7f-784b-40ce-adb9-bd26d6956ef7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.835298] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.840496] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241034, 'name': CreateVM_Task, 'duration_secs': 0.470865} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.844060] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 943.844498] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 943.844498] env[61898]: value = "task-1241037" [ 943.844498] env[61898]: _type = "Task" [ 943.844498] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.845344] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.845505] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.846010] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 943.849312] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3385897-ad87-4046-b954-9f6ee2755b8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.861959] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241035, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.868414] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1241037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.868764] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 943.868764] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523e9742-e59c-a6c3-bc3a-586b8f64db3b" [ 943.868764] env[61898]: _type = "Task" [ 943.868764] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.879228] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523e9742-e59c-a6c3-bc3a-586b8f64db3b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.095874] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d9696d-7eb9-4204-9669-fdb392665710 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.103262] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292c688b-536d-4b2d-a037-57d85326bde0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.140736] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44efff4c-946a-45e0-bffd-4bf5bef247c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.148579] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa76813-7c89-41eb-b74c-d1582c55430a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.161874] env[61898]: DEBUG nova.compute.provider_tree [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.190348] env[61898]: DEBUG nova.network.neutron [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Updated VIF entry in instance network info cache for port 0e2a07e5-49cf-4f3c-8767-1535ccfb295b. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.191155] env[61898]: DEBUG nova.network.neutron [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Updating instance_info_cache with network_info: [{"id": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "address": "fa:16:3e:e5:05:7f", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e2a07e5-49", "ovs_interfaceid": "0e2a07e5-49cf-4f3c-8767-1535ccfb295b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.332893] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109191} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.333160] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.333331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.333577] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk to [datastore2] 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1/5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.333824] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94b785cb-a200-48dc-9f7e-b108d9e15cbd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.340221] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 944.340221] env[61898]: value = "task-1241038" [ 944.340221] env[61898]: _type = "Task" [ 944.340221] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.352469] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.359405] env[61898]: DEBUG oslo_vmware.api [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241035, 'name': PowerOnVM_Task, 'duration_secs': 0.698312} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.362133] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.362341] env[61898]: INFO nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Took 9.69 seconds to spawn the instance on the hypervisor. 
[ 944.362518] env[61898]: DEBUG nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 944.362783] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1241037, 'name': PowerOffVM_Task, 'duration_secs': 0.215511} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.363450] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e301cf16-79b4-41a9-8b7e-9d8d5c8e7eb0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.365877] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.366062] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.366281] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2906df9a-fec0-4cc8-9dbe-86fffe8a0ae6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.381574] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523e9742-e59c-a6c3-bc3a-586b8f64db3b, 'name': SearchDatastore_Task, 'duration_secs': 0.038357} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.382308] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.382308] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.382308] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.382507] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.382616] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.382849] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1729e9de-d4fb-436b-aef4-d401a3e60338 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.392542] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.392741] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 944.393470] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ae10032-370f-421d-9ac5-29219e7ae60d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.398519] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 944.398519] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523697f2-b49b-21f5-f699-2770f17387a5" [ 944.398519] env[61898]: _type = "Task" [ 944.398519] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.407267] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523697f2-b49b-21f5-f699-2770f17387a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.432409] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.432409] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.432588] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleting the datastore file [datastore2] 1aa03975-f18f-4e64-836e-e991b73ee9d5 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.432695] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cb727f9-a3a8-41cb-8ebf-e24d996a242e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.438247] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for the task: (returnval){ [ 944.438247] env[61898]: value = "task-1241040" [ 944.438247] env[61898]: _type = "Task" [ 944.438247] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.446251] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1241040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.664707] env[61898]: DEBUG nova.scheduler.client.report [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 944.693326] env[61898]: DEBUG oslo_concurrency.lockutils [req-891dded1-e177-4427-ab7b-b840d930a78b req-0d8191d8-770e-4793-bfa1-a53a91d8f878 service nova] Releasing lock "refresh_cache-a2ceed2d-be5e-4baa-b2a7-1116812e775d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.854271] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.887587] env[61898]: INFO nova.compute.manager [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Took 26.82 seconds to build instance. [ 944.911129] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523697f2-b49b-21f5-f699-2770f17387a5, 'name': SearchDatastore_Task, 'duration_secs': 0.039518} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.912498] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c10f3b8-889f-40e7-b4f8-c757a1cf7c16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.919793] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 944.919793] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52adab6f-47db-bd77-7c73-28f27b5a4289" [ 944.919793] env[61898]: _type = "Task" [ 944.919793] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.930760] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52adab6f-47db-bd77-7c73-28f27b5a4289, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.950190] env[61898]: DEBUG oslo_vmware.api [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Task: {'id': task-1241040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26978} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.950616] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.950827] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.951021] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.951323] env[61898]: INFO nova.compute.manager [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 944.951631] env[61898]: DEBUG oslo.service.loopingcall [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.951839] env[61898]: DEBUG nova.compute.manager [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 944.951936] env[61898]: DEBUG nova.network.neutron [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.170683] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.843s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.174510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.394s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.174904] env[61898]: DEBUG nova.objects.instance [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lazy-loading 'resources' on Instance uuid b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.241787] env[61898]: INFO nova.network.neutron [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 945.352524] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.390925] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ddbc5913-bbc4-4a5c-aea1-896833761267 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.356s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.440905] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52adab6f-47db-bd77-7c73-28f27b5a4289, 'name': SearchDatastore_Task, 'duration_secs': 0.080707} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.441633] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.441936] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] a2ceed2d-be5e-4baa-b2a7-1116812e775d/a2ceed2d-be5e-4baa-b2a7-1116812e775d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.443082] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee9ca515-189b-4577-b578-73be809e8466 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.453321] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 945.453321] env[61898]: value = "task-1241041" [ 945.453321] env[61898]: _type = "Task" [ 945.453321] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.467883] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.554786] env[61898]: DEBUG nova.compute.manager [req-e533fdc4-1d7e-4144-b772-d9bf35437202 req-3ae43949-5657-409d-9bf1-53251d37c7d9 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Received event network-vif-deleted-ddb06f4c-13ed-4322-b1e8-f4022b32e4f4 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 945.555248] env[61898]: INFO nova.compute.manager [req-e533fdc4-1d7e-4144-b772-d9bf35437202 req-3ae43949-5657-409d-9bf1-53251d37c7d9 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Neutron deleted interface ddb06f4c-13ed-4322-b1e8-f4022b32e4f4; detaching it from the instance and deleting it from the info cache [ 945.555479] env[61898]: DEBUG nova.network.neutron [req-e533fdc4-1d7e-4144-b772-d9bf35437202 req-3ae43949-5657-409d-9bf1-53251d37c7d9 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.853794] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.941869] env[61898]: DEBUG nova.network.neutron [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.970464] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.987022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579a9f76-d132-4097-aa2a-3ef28528302e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.999535] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d6f350-0722-41cf-b56e-6bf52d3ebcd4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.032042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c288ca4f-aa9d-4043-96cb-1b543964fe9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.042356] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f571bd-80e2-4434-889c-570218783cd8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.062770] env[61898]: DEBUG nova.compute.provider_tree [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.064640] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93c6d528-2b9b-4fd1-b71a-9f868eaa9911 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.077224] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476387ff-8e6c-4727-80be-3802dd08f8b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.111227] env[61898]: DEBUG nova.compute.manager [req-e533fdc4-1d7e-4144-b772-d9bf35437202 req-3ae43949-5657-409d-9bf1-53251d37c7d9 service nova] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Detach interface failed, port_id=ddb06f4c-13ed-4322-b1e8-f4022b32e4f4, reason: Instance 1aa03975-f18f-4e64-836e-e991b73ee9d5 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 946.356634] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.447693] env[61898]: INFO nova.compute.manager [-] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Took 1.50 seconds to deallocate network for instance. [ 946.466304] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.550190] env[61898]: DEBUG nova.compute.manager [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 946.550386] env[61898]: DEBUG nova.compute.manager [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 946.550712] env[61898]: DEBUG oslo_concurrency.lockutils [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.550762] env[61898]: DEBUG oslo_concurrency.lockutils [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.550919] env[61898]: DEBUG nova.network.neutron [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.568158] env[61898]: DEBUG nova.scheduler.client.report [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 946.855718] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.955817] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.965772] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241041, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.074941] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.078099] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.380s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.078099] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.078099] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 947.078099] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.370s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.081996] env[61898]: INFO nova.compute.claims [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.084268] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dddc25d-2fab-4a5b-980d-b7fde7e8fef6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.095935] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a11200b-7479-49a1-ba3f-0af0142c7d51 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.113068] env[61898]: INFO nova.scheduler.client.report [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Deleted allocations for instance b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4 [ 947.118994] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41ba55c9-536d-4e84-9a6b-0b4f3fdd9611 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.129440] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb19908-9f36-4a48-a479-71b848ac645a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.167700] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180019MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 947.167933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.353865] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241038, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.569828} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.354360] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7d207fb3-39a4-452d-a133-40f06b6cc713/7d207fb3-39a4-452d-a133-40f06b6cc713.vmdk to [datastore2] 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1/5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.355278] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e1017d-ef19-405e-88c0-8710bc17ef6c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.380871] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1/5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.381311] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49bd3563-ad91-44c7-ad3c-607cb7bf16e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.402143] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 947.402143] env[61898]: value = "task-1241042" [ 947.402143] env[61898]: _type = "Task" [ 947.402143] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.415013] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.466722] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241041, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.849669} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.467141] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] a2ceed2d-be5e-4baa-b2a7-1116812e775d/a2ceed2d-be5e-4baa-b2a7-1116812e775d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.467290] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.467538] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-096651e1-c485-483f-9d14-0af516813586 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.477586] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 947.477586] env[61898]: value = "task-1241043" [ 947.477586] env[61898]: _type = "Task" [ 947.477586] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.486045] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241043, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.626299] env[61898]: DEBUG oslo_concurrency.lockutils [None req-475e899e-b4fa-4951-aac4-a44bf9d02d5f tempest-ServerRescueTestJSONUnderV235-864223680 tempest-ServerRescueTestJSONUnderV235-864223680-project-member] Lock "b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.739s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.725516] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.725778] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.726112] env[61898]: DEBUG nova.network.neutron [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.818198] env[61898]: DEBUG nova.network.neutron [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.818544] env[61898]: DEBUG nova.network.neutron [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.851164] env[61898]: DEBUG nova.compute.manager [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 947.851463] env[61898]: DEBUG oslo_concurrency.lockutils [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.852181] env[61898]: DEBUG oslo_concurrency.lockutils [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.853049] env[61898]: DEBUG oslo_concurrency.lockutils [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.853049] env[61898]: DEBUG nova.compute.manager [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] No waiting events found dispatching network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 947.853049] env[61898]: WARNING nova.compute.manager [req-72eb80da-4c87-4c8b-9c29-e79cdaefe192 req-14e8d3f3-d7dd-4dc9-945e-355bbc85d17a service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received unexpected event network-vif-plugged-53a6375d-a9c3-4c2e-8568-942c3c43bf4a for instance with vm_state shelved_offloaded and task_state spawning. [ 947.918426] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241042, 'name': ReconfigVM_Task, 'duration_secs': 0.322951} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.918765] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1/5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 947.919462] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c353e6dd-4206-4a77-ab08-fba543452b36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.926188] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 947.926188] env[61898]: value = "task-1241044" [ 947.926188] env[61898]: _type = "Task" [ 947.926188] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.934238] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241044, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.989014] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.212381} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.989330] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.990181] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75445548-8ed8-4643-ac81-fa0ec21979db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.011919] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] a2ceed2d-be5e-4baa-b2a7-1116812e775d/a2ceed2d-be5e-4baa-b2a7-1116812e775d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.012234] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ceec0e6-b54f-41e8-bfa5-5767d16363b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.031826] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 948.031826] env[61898]: value = "task-1241045" [ 948.031826] env[61898]: _type = "Task" [ 948.031826] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.042031] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241045, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.319877] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fae42b6-28e9-4b57-8269-1d251d98366e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.322948] env[61898]: DEBUG oslo_concurrency.lockutils [req-47ea3494-0c12-4ef7-8f43-48e6ee1ba3b0 req-4e9b7ae0-ef45-4b9f-873e-65dd1cb9aa0d service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.328131] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7cdac1-cf28-4a11-8d1a-b9368356b633 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.361934] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3ba28e-a63e-40cc-9b0c-cb700dc6f6e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.369745] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261d9b1c-7246-4747-b330-681f008552f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.389197] env[61898]: DEBUG nova.compute.provider_tree [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.437872] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241044, 'name': Rename_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.541477] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241045, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.588553] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-changed-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 948.588776] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing instance network info cache due to event network-changed-4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 948.589547] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.589547] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.589547] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 948.631657] env[61898]: DEBUG nova.network.neutron [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.633563] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.633818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 
0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.634208] env[61898]: DEBUG nova.objects.instance [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'flavor' on Instance uuid bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.892491] env[61898]: DEBUG nova.scheduler.client.report [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 948.937755] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241044, 'name': Rename_Task, 'duration_secs': 0.864847} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.937852] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.938085] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-046ff98d-e38a-4edb-bcc8-7a651273ea91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.944944] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 948.944944] env[61898]: value = "task-1241046" [ 948.944944] env[61898]: _type = "Task" [ 948.944944] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.955104] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.042450] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241045, 'name': ReconfigVM_Task, 'duration_secs': 0.777438} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.042767] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Reconfigured VM instance instance-0000005e to attach disk [datastore2] a2ceed2d-be5e-4baa-b2a7-1116812e775d/a2ceed2d-be5e-4baa-b2a7-1116812e775d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 949.045535] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8680914-45e0-41be-9834-334d5da6c4ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.053141] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 949.053141] env[61898]: value = "task-1241047" [ 949.053141] env[61898]: _type = "Task" [ 949.053141] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.065382] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241047, 'name': Rename_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.134756] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.166033] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1080bc6096360c3d941d8a0808a02ee5',container_format='bare',created_at=2024-10-10T12:00:55Z,direct_url=,disk_format='vmdk',id=038581bd-8ae3-45c6-8697-83c7fb01abff,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1445271470-shelved',owner='11539a8a92af4208a15e69afe3dc60e8',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-10-10T12:01:12Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.166204] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.166291] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 
tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.166582] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.166682] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.166863] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.167244] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.167244] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.167473] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.167559] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.171021] env[61898]: DEBUG nova.virt.hardware [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.171021] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871b02d5-6eaf-4433-a906-6b5450cd30dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.180724] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2aec483-238f-436f-9fa6-d3b82eac8501 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.194961] env[61898]: DEBUG nova.virt.vmwareapi.vmops 
[None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:2b:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53a6375d-a9c3-4c2e-8568-942c3c43bf4a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.202307] env[61898]: DEBUG oslo.service.loopingcall [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.207612] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.207862] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4705cb1-bd35-4d30-8ca3-bc2cd39472e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.227693] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.227693] env[61898]: value = "task-1241048" [ 949.227693] env[61898]: _type = "Task" [ 949.227693] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.235533] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241048, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.400077] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.400361] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 949.410020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.153s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.410020] env[61898]: DEBUG nova.objects.instance [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lazy-loading 'resources' on Instance uuid 11ca5129-0dc3-44b3-8f7b-215c93dac764 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.417024] env[61898]: DEBUG nova.objects.instance [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'pci_requests' on Instance uuid bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.459025] env[61898]: DEBUG oslo_vmware.api [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241046, 'name': PowerOnVM_Task, 'duration_secs': 0.495454} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.459684] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.461130] env[61898]: INFO nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Took 17.20 seconds to spawn the instance on the hypervisor. [ 949.461130] env[61898]: DEBUG nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 949.461248] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aa9bb5-077a-4f3e-87c4-e96279062e2d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.482536] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updated VIF entry in instance network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 949.483138] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.563097] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241047, 'name': Rename_Task, 'duration_secs': 0.140393} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.563534] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 949.563824] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-153c9e3d-d105-4979-93fb-1d929823fba9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.570329] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 949.570329] env[61898]: value = "task-1241049" [ 949.570329] env[61898]: _type = "Task" [ 949.570329] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.579696] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241049, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.738171] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241048, 'name': CreateVM_Task, 'duration_secs': 0.308982} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.738344] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 949.739069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.739241] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.739605] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 949.740265] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fda6e24-ce6c-4f98-868a-7a3d8159fc40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.744655] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 949.744655] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d8b4b3-8a69-dffc-9011-7006e67cf4d1" [ 949.744655] env[61898]: _type = "Task" [ 949.744655] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.760926] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.761198] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Processing image 038581bd-8ae3-45c6-8697-83c7fb01abff {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.761433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.761711] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.761910] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.762177] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a17d9ddc-786e-49af-a667-db0ff35a60e8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.779757] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.779973] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.780740] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-112f1ca3-0a27-4ce9-9ae5-e05561d75b6b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.785903] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 949.785903] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e69a7f-e057-8c1f-897d-4abe8f2c92a8" [ 949.785903] env[61898]: _type = "Task" [ 949.785903] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.793661] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e69a7f-e057-8c1f-897d-4abe8f2c92a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.875355] env[61898]: DEBUG nova.compute.manager [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 949.875557] env[61898]: DEBUG nova.compute.manager [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing instance network info cache due to event network-changed-53a6375d-a9c3-4c2e-8568-942c3c43bf4a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 949.875774] env[61898]: DEBUG oslo_concurrency.lockutils [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.875921] env[61898]: DEBUG oslo_concurrency.lockutils [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.876093] env[61898]: DEBUG nova.network.neutron [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Refreshing network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.910046] env[61898]: DEBUG nova.compute.utils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.911586] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 949.911852] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.920714] env[61898]: DEBUG nova.objects.base [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 949.920917] env[61898]: DEBUG nova.network.neutron [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.984240] env[61898]: INFO nova.compute.manager [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Took 36.16 seconds to build instance. 
[ 949.985876] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.987171] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-changed-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 949.987171] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing instance network info cache due to event network-changed-4bed7107-cc7d-431f-a835-84a51f188455. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 949.987171] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.987171] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.987171] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.993514] env[61898]: DEBUG nova.policy [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9e50ee88de9d4d67b7d4222dfe117256', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98ca09762c2e4b119437aa5b1a36e133', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.067018] env[61898]: DEBUG nova.policy [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize 
/opt/stack/nova/nova/policy.py:201}} [ 950.080439] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241049, 'name': PowerOnVM_Task} progress is 87%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.151094] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7840a493-319a-47c0-a9b3-1c410f67e6de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.160645] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b163b8d1-ca6f-446a-b366-8759747e56a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.190756] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dc3f00-ed74-4c79-a63e-94bc4a5374b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.198415] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4692472b-6093-4b86-89ec-05c583739d3e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.213171] env[61898]: DEBUG nova.compute.provider_tree [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.299192] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 950.299474] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Fetch image to [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c/OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 950.299666] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Downloading stream optimized image 038581bd-8ae3-45c6-8697-83c7fb01abff to [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c/OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c.vmdk on the data store datastore1 as vApp {{(pid=61898) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 950.299879] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Downloading image file data 
038581bd-8ae3-45c6-8697-83c7fb01abff to the ESX as VM named 'OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c' {{(pid=61898) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 950.381264] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 950.381264] env[61898]: value = "resgroup-9" [ 950.381264] env[61898]: _type = "ResourcePool" [ 950.381264] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 950.381562] env[61898]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-14b16e14-6acc-41e0-9171-9029a12be4ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.405144] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lease: (returnval){ [ 950.405144] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 950.405144] env[61898]: _type = "HttpNfcLease" [ 950.405144] env[61898]: } obtained for vApp import into resource pool (val){ [ 950.405144] env[61898]: value = "resgroup-9" [ 950.405144] env[61898]: _type = "ResourcePool" [ 950.405144] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 950.405144] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the lease: (returnval){ [ 950.405144] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 950.405144] env[61898]: _type = "HttpNfcLease" [ 950.405144] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 950.414574] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 950.414574] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 950.414574] env[61898]: _type = "HttpNfcLease" [ 950.414574] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 950.417300] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 950.485084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7726c5ba-ec65-4323-90fc-bcb6a74277d3 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.681s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.584010] env[61898]: DEBUG oslo_vmware.api [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241049, 'name': PowerOnVM_Task, 'duration_secs': 1.00334} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.584299] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 950.584513] env[61898]: INFO nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Took 11.09 seconds to spawn the instance on the hypervisor. [ 950.584695] env[61898]: DEBUG nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 950.585484] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d93a9dc-3337-4974-92ca-197cef614be8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.716214] env[61898]: DEBUG nova.scheduler.client.report [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 950.734177] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updated VIF entry in instance network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.734545] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.912611] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 950.912611] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 950.912611] env[61898]: _type = "HttpNfcLease" [ 950.912611] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 950.913475] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Successfully created port: 508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.964998] env[61898]: DEBUG nova.network.neutron [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updated VIF entry in instance network info cache for port 53a6375d-a9c3-4c2e-8568-942c3c43bf4a. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 950.965652] env[61898]: DEBUG nova.network.neutron [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.101744] env[61898]: INFO nova.compute.manager [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Took 26.99 seconds to build instance. 
[ 951.222103] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.223346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.030s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.236540] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.236885] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 951.237097] env[61898]: DEBUG nova.compute.manager [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 951.237321] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.237468] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.237634] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.252761] env[61898]: INFO nova.scheduler.client.report [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Deleted allocations for instance 11ca5129-0dc3-44b3-8f7b-215c93dac764 [ 951.414123] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 951.414123] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 951.414123] env[61898]: _type = "HttpNfcLease" [ 951.414123] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 951.430078] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 951.464339] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.464621] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.464784] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.465008] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.465782] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.465978] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.466256] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.466370] 
env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.466547] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.467184] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.467184] env[61898]: DEBUG nova.virt.hardware [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.468060] env[61898]: DEBUG oslo_concurrency.lockutils [req-484f55fc-4cb4-4718-9000-7eb77286cbf0 req-9b05f619-38bb-4562-a758-c784c950f3ac service nova] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.468451] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcdf79a5-3dbe-4001-97c2-3b24ae22d6b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.476439] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2715b1d0-aa6a-459a-ab9f-6119c40a23b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.604281] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5f092ad8-e798-4266-9b07-1952f11fd1a3 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.498s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.761790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-306e1c3a-53f5-418e-b1f0-6d853bab9889 tempest-ServersTestFqdnHostnames-145738538 tempest-ServersTestFqdnHostnames-145738538-project-member] Lock "11ca5129-0dc3-44b3-8f7b-215c93dac764" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.246s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.821822] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.822138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.822387] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.822623] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.822815] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.825743] env[61898]: INFO nova.compute.manager [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Terminating instance [ 951.913870] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 951.913870] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 951.913870] env[61898]: _type = "HttpNfcLease" [ 951.913870] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 951.916289] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 951.916289] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f592ef-028e-2384-6727-1a44a5aa70dd" [ 951.916289] env[61898]: _type = "HttpNfcLease" [ 951.916289] env[61898]: }. 
{{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 951.917250] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07231070-3f8a-4c57-b0f4-9f80eb914d6f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.924134] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 951.924354] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 951.991944] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7bc21357-7c9c-4cab-9372-4d3be1f84d38 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.004546] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5aef1a-6632-40e1-a08b-befb8a99fa93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.012073] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5ac727-aa11-4194-aa4d-5d4d9afcac43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.048665] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb25a53-d908-41b4-88fb-f8f84f71723b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.057015] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d00bb2d-f300-49f5-86b8-741a7c104b32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.071981] env[61898]: DEBUG nova.compute.provider_tree [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.218654] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.219156] env[61898]: DEBUG nova.network.neutron [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.336570] env[61898]: DEBUG nova.compute.manager [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 952.336927] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 952.338228] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9157680f-f449-4c61-b69d-0001be553f17 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.348833] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.350747] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb1c8250-d23a-4d62-845b-f4ea6bbc0600 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.358687] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 952.358687] env[61898]: value = "task-1241051" [ 952.358687] env[61898]: _type = "Task" [ 952.358687] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.369182] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.522509] env[61898]: DEBUG nova.network.neutron [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Successfully updated port: 1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.577611] env[61898]: DEBUG nova.scheduler.client.report [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 952.636260] env[61898]: DEBUG nova.compute.manager [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 952.636620] env[61898]: DEBUG oslo_concurrency.lockutils [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] Acquiring lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.637031] env[61898]: DEBUG oslo_concurrency.lockutils [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.637578] env[61898]: DEBUG oslo_concurrency.lockutils [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.637715] env[61898]: DEBUG nova.compute.manager [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] No waiting events found dispatching network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.637903] env[61898]: WARNING nova.compute.manager [req-749c4998-b35e-4493-8f8e-bdb634d2f525 req-f8fc837d-c272-4d30-ab28-884d1d68ee7d service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received unexpected event network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c for instance with vm_state active and task_state None. 
[ 952.722666] env[61898]: DEBUG oslo_concurrency.lockutils [req-80838a86-9461-4995-8029-79c2b8b7a549 req-df4dc82c-7c52-46fe-a5d8-74f3f448fc3c service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.733068] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Successfully updated port: 508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.872223] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241051, 'name': PowerOffVM_Task, 'duration_secs': 0.433125} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.874304] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 952.874558] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 952.874782] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cfb20cca-afdd-4f56-8136-10861e8403dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.959436] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 952.959724] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 952.960183] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore2] 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 952.960476] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75734bbb-8be6-426e-9aad-1b37e0093cfc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.966790] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 
tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 952.966790] env[61898]: value = "task-1241053" [ 952.966790] env[61898]: _type = "Task" [ 952.966790] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.974815] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.024996] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.025318] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.025550] env[61898]: DEBUG nova.network.neutron [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.241026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.241026] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.241026] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.271182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.271182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 
tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.271182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 953.271182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.271182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.272767] env[61898]: INFO nova.compute.manager [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Terminating instance [ 953.462026] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 953.462026] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 953.463179] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cde0ee-0cb7-40fc-a2b1-63d06f94a3ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.475818] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk is in state: ready. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 953.476258] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 953.481121] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2a4fe652-52af-4ec8-9605-e6b611911f4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.482860] env[61898]: DEBUG oslo_vmware.api [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215784} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.483138] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 953.483664] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 953.483664] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 953.485043] env[61898]: INFO nova.compute.manager [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 953.485043] env[61898]: DEBUG oslo.service.loopingcall [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 953.485367] env[61898]: DEBUG nova.compute.manager [-] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 953.485367] env[61898]: DEBUG nova.network.neutron [-] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 953.588809] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.365s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.590454] env[61898]: DEBUG nova.compute.manager [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=61898) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5238}} [ 953.592545] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.625s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.593722] env[61898]: INFO nova.compute.claims [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.615075] env[61898]: WARNING nova.network.neutron [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] 89882853-88ec-48f1-a883-3be9e65f9fd8 already exists in list: networks containing: ['89882853-88ec-48f1-a883-3be9e65f9fd8']. ignoring it [ 953.657025] env[61898]: DEBUG oslo_vmware.rw_handles [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524046eb-b61a-ece5-530b-83d423cb65ea/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 953.657025] env[61898]: INFO nova.virt.vmwareapi.images [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Downloaded image file data 038581bd-8ae3-45c6-8697-83c7fb01abff [ 953.657025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255cd9c2-8a0b-43d8-89c2-111d098a6c78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.675840] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ba79811-554e-4b28-aeef-0c5a807deea7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.706239] env[61898]: INFO nova.virt.vmwareapi.images [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] The imported VM was unregistered [ 953.708589] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 953.708909] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Creating directory with path [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.711661] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2c823c2-ad61-4f3f-b286-bf7bc06c8cfc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.729586] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Created directory with path [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.729865] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c/OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c.vmdk to [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk. 
{{(pid=61898) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 953.730167] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-28b339b7-1e5b-4d10-a91b-1a6c52718c9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.739842] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 953.739842] env[61898]: value = "task-1241055" [ 953.739842] env[61898]: _type = "Task" [ 953.739842] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.752219] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.780025] env[61898]: DEBUG nova.compute.manager [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 953.780025] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.780025] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63156dd0-921c-4519-b1f5-9a5224421e97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.789018] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.789018] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82fe5477-7035-4817-8e2b-3c96b56bd03f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.794583] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 953.794583] env[61898]: value = "task-1241056" [ 953.794583] env[61898]: _type = "Task" [ 953.794583] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.807494] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241056, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.821493] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.193290] env[61898]: DEBUG nova.network.neutron [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating instance_info_cache with network_info: [{"id": "508f7b5a-a3ef-4688-9918-45d566ba903a", "address": "fa:16:3e:75:1f:00", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap508f7b5a-a3", "ovs_interfaceid": "508f7b5a-a3ef-4688-9918-45d566ba903a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.249703] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.263841] env[61898]: INFO nova.scheduler.client.report [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted allocation for migration 349e2a4f-d827-4046-80e3-17c5d05dd026 [ 954.306457] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241056, 'name': PowerOffVM_Task, 'duration_secs': 0.410653} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.306753] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.306992] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.307431] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b620a3fa-e938-4bd6-ae4e-586b07211762 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.315464] env[61898]: DEBUG nova.network.neutron [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "address": "fa:16:3e:c5:73:bd", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap1b2175da-a7", "ovs_interfaceid": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.381024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.381024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.381024] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore2] a2ceed2d-be5e-4baa-b2a7-1116812e775d {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.381533] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31da253a-f8a3-4c26-8c1e-1d70a079d050 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.390082] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 954.390082] env[61898]: value = "task-1241058" [ 954.390082] env[61898]: _type = "Task" [ 954.390082] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.399150] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.658910] env[61898]: DEBUG nova.network.neutron [-] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.674055] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 954.674234] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-1b2175da-a7e5-4786-a4f6-780fb83f447c. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 954.674444] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.695571] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.696562] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Instance network_info: |[{"id": "508f7b5a-a3ef-4688-9918-45d566ba903a", "address": "fa:16:3e:75:1f:00", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap508f7b5a-a3", "ovs_interfaceid": "508f7b5a-a3ef-4688-9918-45d566ba903a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 954.696562] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:1f:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '508f7b5a-a3ef-4688-9918-45d566ba903a', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.704145] env[61898]: DEBUG oslo.service.loopingcall [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.707397] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.708111] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af8c9334-5d6f-4edd-920a-993a06fa01e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.732206] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.732206] env[61898]: value = "task-1241059" [ 954.732206] env[61898]: _type = "Task" [ 954.732206] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.742732] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241059, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.751244] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.771616] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d2525d2e-65b2-4570-8b03-7a0abb8fcbb9 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 18.714s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.819841] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.819841] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.820073] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.820315] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.820500] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 
bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.822450] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b50b55-20c3-471b-8b2c-8fbe038eba81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.844363] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.844640] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.844804] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.844993] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.845224] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.845395] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.845986] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.845986] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 
tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.845986] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.846278] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.846429] env[61898]: DEBUG nova.virt.hardware [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.854888] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfiguring VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 954.858277] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d8b1c65-92a5-4813-99e8-2f0b3a8e6def {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.878513] env[61898]: DEBUG oslo_vmware.api [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 954.878513] env[61898]: value = "task-1241060" [ 954.878513] env[61898]: _type = "Task" [ 954.878513] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.890128] env[61898]: DEBUG oslo_vmware.api [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241060, 'name': ReconfigVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.891907] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8be37c7-a92f-49c2-acfe-bebb2dd2bfda {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.903342] env[61898]: DEBUG oslo_vmware.api [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371659} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.905588] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.905847] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.906124] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.906300] env[61898]: INFO nova.compute.manager [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 954.906937] env[61898]: DEBUG oslo.service.loopingcall [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.906937] env[61898]: DEBUG nova.compute.manager [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 954.907119] env[61898]: DEBUG nova.network.neutron [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 954.910192] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660e16e4-c8c1-4248-a742-bbb12af1dfba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.953166] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341b1b04-a8de-429a-8d8d-78626b8142cb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.961842] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c289312-063f-43e3-af54-40025dcc5afd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.976738] env[61898]: DEBUG nova.compute.provider_tree [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.162483] env[61898]: INFO nova.compute.manager [-] [instance: 
5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Took 1.68 seconds to deallocate network for instance. [ 955.242620] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241059, 'name': CreateVM_Task, 'duration_secs': 0.378679} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.242864] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.246590] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.246800] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.247122] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 955.247765] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bd296e0-a61f-478b-9f26-8e5bd3b399d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.253924] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 955.253924] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52662eb9-f2f4-a389-eeb2-2eb05772ee41" [ 955.253924] env[61898]: _type = "Task" [ 955.253924] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.258584] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.267715] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52662eb9-f2f4-a389-eeb2-2eb05772ee41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.392602] env[61898]: DEBUG oslo_vmware.api [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241060, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.482247] env[61898]: DEBUG nova.scheduler.client.report [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 955.672286] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.697705] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 1b2175da-a7e5-4786-a4f6-780fb83f447c. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.698226] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "address": "fa:16:3e:c5:73:bd", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b2175da-a7", "ovs_interfaceid": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.753659] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 69%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.768212] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52662eb9-f2f4-a389-eeb2-2eb05772ee41, 'name': SearchDatastore_Task, 'duration_secs': 0.092594} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.768554] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.768826] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.769112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.769266] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.769451] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.769727] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41378f1f-ee70-40ee-9ffe-edb444cb313d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.786711] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.786978] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.787789] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a63b4a11-919a-4425-a19d-7b7dee89d93a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.790771] env[61898]: DEBUG nova.objects.instance [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.795736] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 955.795736] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e397f1-8261-c93c-6420-218d00b8bc12" [ 955.795736] env[61898]: _type = "Task" [ 955.795736] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.805772] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e397f1-8261-c93c-6420-218d00b8bc12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.855250] env[61898]: DEBUG nova.network.neutron [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.889744] env[61898]: DEBUG oslo_vmware.api [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241060, 'name': ReconfigVM_Task, 'duration_secs': 0.641933} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.890392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.890619] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfigured VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 955.989428] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.397s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.989973] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 955.997025] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.137s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.997025] env[61898]: DEBUG nova.objects.instance [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'resources' on Instance uuid 47208ebd-8407-4d00-8378-adb0a4a21c2a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.201928] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.202136] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Received event network-vif-plugged-508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 956.202383] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.202693] env[61898]: DEBUG 
oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.202918] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.203132] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] No waiting events found dispatching network-vif-plugged-508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.203321] env[61898]: WARNING nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Received unexpected event network-vif-plugged-508f7b5a-a3ef-4688-9918-45d566ba903a for instance with vm_state building and task_state spawning. [ 956.203488] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Received event network-changed-508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 956.203719] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Refreshing instance network info cache due to event network-changed-508f7b5a-a3ef-4688-9918-45d566ba903a. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 956.203991] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Acquiring lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.204887] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Acquired lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.205228] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Refreshing network info cache for port 508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.256066] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.297719] env[61898]: DEBUG oslo_concurrency.lockutils [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.298376] env[61898]: DEBUG oslo_concurrency.lockutils [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.299338] env[61898]: DEBUG nova.network.neutron [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.299633] env[61898]: DEBUG nova.objects.instance [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'info_cache' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.317275] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e397f1-8261-c93c-6420-218d00b8bc12, 'name': SearchDatastore_Task, 'duration_secs': 0.08666} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.320030] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1657fce0-7201-464f-9adc-462c0480dd00 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.328678] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 956.328678] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52dde377-c445-e948-6297-eb466f57f5f0" [ 956.328678] env[61898]: _type = "Task" [ 956.328678] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.340867] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52dde377-c445-e948-6297-eb466f57f5f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.358110] env[61898]: INFO nova.compute.manager [-] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Took 1.45 seconds to deallocate network for instance. 
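The recurring triples above and below this point ("Invoking <method>_Task ...", "Waiting for the task: (returnval){...}", "Task: {...} progress is N% ... completed successfully") are produced by oslo.vmware's task-polling helper, which the Nova vmwareapi driver uses for every vCenter operation that returns a Task. The following is a minimal, hypothetical sketch of that pattern only, not the actual Nova driver code: the vCenter host, credentials, poll interval, and the datastore-browser moref value are placeholders, and SearchDatastore_Task is used simply because it appears in the log entries here.

    # Minimal sketch of the oslo.vmware invoke/poll pattern seen in this log.
    # Host, credentials, and the 'datastoreBrowser-21' moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test',        # placeholder vCenter host
        'user', 'secret',         # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5,   # how often wait_for_task re-polls progress
    )

    # Methods that return a Task moref (SearchDatastore_Task,
    # CopyVirtualDisk_Task, MoveVirtualDisk_Task, ...) are invoked
    # through the session, which logs the "Invoking ..." DEBUG line.
    browser = vim_util.get_moref('datastoreBrowser-21', 'HostDatastoreBrowser')
    task = session.invoke_api(
        session.vim, 'SearchDatastore_Task', browser,
        datastorePath='[datastore1] devstack-image-cache_base')

    # wait_for_task blocks until the task reaches a terminal state,
    # emitting the "Task: {...} progress is N%" lines while it polls
    # and returning the task result on success.
    result = session.wait_for_task(task)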
[ 956.397942] env[61898]: DEBUG oslo_concurrency.lockutils [None req-15a0734a-0623-48fb-8560-2b8beb4339b6 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.762s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.497395] env[61898]: DEBUG nova.compute.utils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.502020] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 956.502020] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 956.546826] env[61898]: DEBUG nova.policy [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a910d0cdf3cd4b17af818abd25a38b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ce0562f486e44cc877c1cc31525a13a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 956.740944] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5247e13-d67b-41a1-aafb-f4aa66396362 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.758237] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad484ae2-4e8b-4284-aa21-08c87187a71a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.762315] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241055, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.982917} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.762587] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c/OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c.vmdk to [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk. [ 956.763163] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Cleaning up location [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 956.763163] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_85ad07f7-8a87-4743-9b9e-c955ee60fc4c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.764017] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebf3b336-8390-4911-aa50-7032b2beec81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.798500] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d871678-43f1-4476-b477-7d00b4d2ecb5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.801320] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 956.801320] env[61898]: value = "task-1241061" [ 956.801320] env[61898]: _type = "Task" [ 956.801320] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.809089] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e8c34e-e79c-4735-b12d-eb9416979de3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.818890] env[61898]: DEBUG nova.objects.base [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Object Instance<7c6aad92-6e91-48fc-89ae-5ee4c89f449c> lazy-loaded attributes: flavor,info_cache {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 956.820161] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033903} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.820975] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 956.821202] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "[datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.821683] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk to [datastore1] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.821952] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19f10a56-3e80-411b-a7d1-59d4358d4900 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.833954] env[61898]: DEBUG nova.compute.provider_tree [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.838036] env[61898]: DEBUG nova.compute.manager [req-353b5ef6-41de-4c76-a6b7-1c454a7f2420 req-eef88e01-9344-48fa-ae8d-68efff058880 service nova] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Received event network-vif-deleted-0e2a07e5-49cf-4f3c-8767-1535ccfb295b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 956.842267] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 956.842267] env[61898]: value = "task-1241062" [ 956.842267] env[61898]: _type = "Task" [ 956.842267] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.850835] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52dde377-c445-e948-6297-eb466f57f5f0, 'name': SearchDatastore_Task, 'duration_secs': 0.085432} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.851535] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.851841] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b/622326f9-b3c5-452e-b7f6-dfe6de1e7d4b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.852140] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceb4723b-005a-4530-bd08-f6976a72ff8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.856998] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.861521] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 956.861521] env[61898]: value = "task-1241063" [ 956.861521] env[61898]: _type = "Task" [ 956.861521] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.866933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.871669] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.006836] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 957.175435] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Successfully created port: fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.209703] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updated VIF entry in instance network info cache for port 508f7b5a-a3ef-4688-9918-45d566ba903a. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.209703] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating instance_info_cache with network_info: [{"id": "508f7b5a-a3ef-4688-9918-45d566ba903a", "address": "fa:16:3e:75:1f:00", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap508f7b5a-a3", "ovs_interfaceid": "508f7b5a-a3ef-4688-9918-45d566ba903a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.339178] env[61898]: DEBUG nova.scheduler.client.report [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 957.352676] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.371489] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241063, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.711664] env[61898]: DEBUG oslo_concurrency.lockutils [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] Releasing lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.711976] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Received event network-vif-deleted-e508f4ea-8f2b-492f-b0e2-3eb68afaaa15 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 957.712268] env[61898]: INFO nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Neutron deleted interface e508f4ea-8f2b-492f-b0e2-3eb68afaaa15; detaching it from the instance and deleting it from the info cache [ 957.712921] env[61898]: DEBUG nova.network.neutron [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.844269] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.851s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.849085] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.854s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.849085] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.850258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.895s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.854027] 
env[61898]: DEBUG nova.objects.instance [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lazy-loading 'resources' on Instance uuid 1aa03975-f18f-4e64-836e-e991b73ee9d5 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.860632] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.872188] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950617} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.872334] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b/622326f9-b3c5-452e-b7f6-dfe6de1e7d4b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.872462] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.874131] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-595233d5-0dff-47d9-873d-5a5e3a8c98cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.882498] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 957.882498] env[61898]: value = "task-1241064" [ 957.882498] env[61898]: _type = "Task" [ 957.882498] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.894365] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241064, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.899830] env[61898]: INFO nova.scheduler.client.report [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance 47208ebd-8407-4d00-8378-adb0a4a21c2a [ 957.902833] env[61898]: INFO nova.scheduler.client.report [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocations for instance 5323b250-fad8-4d71-81ed-c5e5eeb8aeab [ 957.994980] env[61898]: DEBUG nova.network.neutron [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [{"id": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "address": "fa:16:3e:42:f4:b3", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f33f2c4-46", "ovs_interfaceid": "9f33f2c4-4626-4230-90ea-e91c5f0da486", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.019465] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.055281] env[61898]: DEBUG 
nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.055281] env[61898]: DEBUG nova.virt.hardware [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.056626] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97564152-39d5-4a97-b1f0-feab31b80e15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.070603] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bd5b4d-48a0-43d2-8373-a1742816da61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.221941] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19c40845-abb6-4570-87c8-169a075b7cd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.232639] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0a3666-8110-4cee-9cd3-fca202aec217 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.264616] env[61898]: DEBUG nova.compute.manager [req-f0672a46-39e3-4825-8f65-ed4fccbcc0eb req-52b52952-6caa-4c40-bfb0-6968fd1bc773 service nova] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Detach interface failed, port_id=e508f4ea-8f2b-492f-b0e2-3eb68afaaa15, reason: Instance 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 958.362188] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.395338] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241064, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.416688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-4588d1aa-e3d0-4973-80a5-d224870ff829 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "47208ebd-8407-4d00-8378-adb0a4a21c2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.172s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.420991] env[61898]: DEBUG oslo_concurrency.lockutils [None req-42294139-8a75-4522-8c2a-42eda75da8c1 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "5323b250-fad8-4d71-81ed-c5e5eeb8aeab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.200s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.497082] env[61898]: DEBUG oslo_concurrency.lockutils [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-7c6aad92-6e91-48fc-89ae-5ee4c89f449c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.581896] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86627e94-0260-4f18-89bc-831d97589769 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.597707] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ac6838-9ee4-456c-87a3-09e98d7fcf2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.643043] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf178e4-e2d4-4a8d-ba65-e8811313a77e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.651310] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b396d7-6d4b-413f-8c07-25dc5047614b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.665508] env[61898]: DEBUG nova.compute.provider_tree [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.684964] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "06c894a2-9236-4534-922f-4255c6cf0531" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.685241] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 
tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.697132] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.697419] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.858425] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.895480] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241064, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.655478} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.895773] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.896821] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d36f049-7fd4-4eb5-a2a5-d8a2a0bee5c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.923491] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b/622326f9-b3c5-452e-b7f6-dfe6de1e7d4b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.924295] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29987c85-3b33-418a-9f11-2550ee001f15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.946692] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 958.946692] env[61898]: value = "task-1241065" [ 958.946692] env[61898]: _type = "Task" [ 958.946692] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.955059] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.169171] env[61898]: DEBUG nova.scheduler.client.report [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 959.189749] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 959.201345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.201345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.202274] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0587731b-dac0-481e-a28f-a9a662e7fc32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.223289] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4325761d-7a80-4c80-96c3-4a8ceef73398 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.252968] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfiguring VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 959.253804] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b44c281b-2b2a-4d5f-a3db-d3f8936840af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.281168] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 959.281168] env[61898]: value = "task-1241066" [ 959.281168] env[61898]: _type = "Task" [ 959.281168] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.288298] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.368760] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.448032] env[61898]: DEBUG nova.compute.manager [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Received event network-vif-plugged-fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 959.448316] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.448842] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.449080] env[61898]: DEBUG oslo_concurrency.lockutils [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.449389] env[61898]: DEBUG nova.compute.manager [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] No waiting events found dispatching network-vif-plugged-fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 959.449600] env[61898]: WARNING nova.compute.manager [req-5ca46c3a-f896-4c4c-bafe-fc3d48fc77a6 req-29484f69-f8dc-4674-86a8-f2d7b4d2b9e8 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Received unexpected event network-vif-plugged-fa47b33a-e279-408b-bcd7-9165ff102179 for instance with vm_state building and task_state spawning. [ 959.461440] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241065, 'name': ReconfigVM_Task, 'duration_secs': 0.322853} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.461730] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b/622326f9-b3c5-452e-b7f6-dfe6de1e7d4b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.464520] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3887614d-d00f-4027-b413-2d3055e3c04f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.469547] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 959.469547] env[61898]: value = "task-1241067" [ 959.469547] env[61898]: _type = "Task" [ 959.469547] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.480589] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241067, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.502910] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.503279] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83ac31f9-df5e-4044-9e72-747bbe375a28 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.509575] env[61898]: DEBUG oslo_vmware.api [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 959.509575] env[61898]: value = "task-1241068" [ 959.509575] env[61898]: _type = "Task" [ 959.509575] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.521772] env[61898]: DEBUG oslo_vmware.api [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241068, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.532889] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Successfully updated port: fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.679035] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.681792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.514s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.696680] env[61898]: INFO nova.scheduler.client.report [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Deleted allocations for instance 1aa03975-f18f-4e64-836e-e991b73ee9d5 [ 959.709535] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.789980] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.859460] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241062, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.617109} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.859460] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/038581bd-8ae3-45c6-8697-83c7fb01abff/038581bd-8ae3-45c6-8697-83c7fb01abff.vmdk to [datastore1] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.860198] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780e1b76-4f61-4c5f-8137-c55f4c8e1147 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.882924] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.884026] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5185b301-c932-4266-8afa-8a213d02b359 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.902640] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 959.902640] env[61898]: value = "task-1241069" [ 959.902640] env[61898]: _type = "Task" [ 959.902640] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.911074] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241069, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.950733] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.951052] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.978687] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241067, 'name': Rename_Task, 'duration_secs': 0.141336} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.979907] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.979907] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2c3f47b-c5dd-46b0-8a2c-084f0f2b231b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.985466] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 959.985466] env[61898]: value = "task-1241070" [ 959.985466] env[61898]: _type = "Task" [ 959.985466] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.992901] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241070, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.019702] env[61898]: DEBUG oslo_vmware.api [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241068, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.034967] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.035146] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.035278] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 960.205604] env[61898]: DEBUG oslo_concurrency.lockutils [None req-140634e6-1db7-4753-b5e5-898f2284dafd tempest-ServersAdminTestJSON-1000570503 tempest-ServersAdminTestJSON-1000570503-project-member] Lock "1aa03975-f18f-4e64-836e-e991b73ee9d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.907s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.290349] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.412622] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241069, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.453458] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 960.498154] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241070, 'name': PowerOnVM_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.520995] env[61898]: DEBUG oslo_vmware.api [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241068, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.609646] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.724390] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 320577e5-f197-4f66-a94f-9b9ba2479325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.724545] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance cf428138-4d0d-43bf-a654-06a62a82c9a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.724674] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 070bc0cc-ff77-48b8-bd08-f17fe69e25af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.724792] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.724905] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance cd1335b7-78b7-4cea-add7-dd69736067b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.725023] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7c6aad92-6e91-48fc-89ae-5ee4c89f449c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.725138] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 45b8dc91-b577-4548-bf3a-32c7c936c616 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.725273] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 960.725386] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4522f4ef-c8f6-4fe1-acd5-796f87f22839 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.725506] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance a2ceed2d-be5e-4baa-b2a7-1116812e775d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 960.725617] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.725727] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance c26c4add-728c-45ea-8465-7c4273b7d97b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 960.792689] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.913327] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241069, 'name': ReconfigVM_Task, 'duration_secs': 0.676558} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.913692] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 45b8dc91-b577-4548-bf3a-32c7c936c616/45b8dc91-b577-4548-bf3a-32c7c936c616.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.914268] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa97bfac-2659-45a0-80cc-9ad0bbdc74e2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.920091] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 960.920091] env[61898]: value = "task-1241071" [ 960.920091] env[61898]: _type = "Task" [ 960.920091] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.927916] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241071, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.977094] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.995943] env[61898]: DEBUG oslo_vmware.api [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241070, 'name': PowerOnVM_Task, 'duration_secs': 0.687039} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.996714] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.996928] env[61898]: INFO nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Took 9.57 seconds to spawn the instance on the hypervisor. 
[ 960.997130] env[61898]: DEBUG nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 960.997948] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e40a0ab-f59f-4ac9-83e4-80563bc03f4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.019764] env[61898]: DEBUG oslo_vmware.api [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241068, 'name': PowerOnVM_Task, 'duration_secs': 1.404138} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.020076] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.020273] env[61898]: DEBUG nova.compute.manager [None req-27beea62-fd1a-4ace-b4b8-5686cd7a20ed tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 961.021038] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c763a135-5348-49c9-8b81-1a116b64673d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.193408] env[61898]: DEBUG nova.network.neutron [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 961.228340] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 06c894a2-9236-4534-922f-4255c6cf0531 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 961.292818] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.429964] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241071, 'name': Rename_Task, 'duration_secs': 0.159641} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.430280] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.430534] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41efb53d-bc86-425e-8523-cfc3d39c2445 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.436328] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 961.436328] env[61898]: value = "task-1241072" [ 961.436328] env[61898]: _type = "Task" [ 961.436328] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.445720] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.478240] env[61898]: DEBUG nova.compute.manager [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Received event network-changed-fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 961.478637] env[61898]: DEBUG nova.compute.manager [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Refreshing instance network info cache due to event network-changed-fa47b33a-e279-408b-bcd7-9165ff102179. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 961.478737] env[61898]: DEBUG oslo_concurrency.lockutils [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] Acquiring lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 961.515315] env[61898]: INFO nova.compute.manager [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Took 29.82 seconds to build instance. [ 961.696368] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.696856] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Instance network_info: |[{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 961.697239] env[61898]: DEBUG oslo_concurrency.lockutils [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] Acquired lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.697469] env[61898]: DEBUG nova.network.neutron [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Refreshing network info cache for port fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.700087] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 
c26c4add-728c-45ea-8465-7c4273b7d97b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:72:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa47b33a-e279-408b-bcd7-9165ff102179', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 961.707193] env[61898]: DEBUG oslo.service.loopingcall [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.707666] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 961.708459] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f8a8157-2251-444b-ad7a-ca06daed6690 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.731249] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 961.731493] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 961.731639] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 961.736415] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 961.736415] env[61898]: value = "task-1241073" [ 961.736415] env[61898]: _type = "Task" [ 961.736415] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.742550] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241073, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.793613] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.950810] env[61898]: DEBUG oslo_vmware.api [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241072, 'name': PowerOnVM_Task, 'duration_secs': 0.451978} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.951815] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.017431] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26c2dfeb-1827-4d12-ae53-944a5a9757cb tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.333s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.021951] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c990d39-7042-482c-88f3-9ed887bfded9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.029135] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094ecb4e-88bd-45d6-b342-d0e8c2e8346a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.065106] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0503a5dd-f567-46a3-9c69-86c62254feeb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.080374] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25d38c4-4add-4e5b-99b0-d05164bda2f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.086881] env[61898]: DEBUG nova.compute.manager [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 962.088212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93155c0-1809-456f-9af9-52f4e572f1a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.101695] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.245924] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241073, 'name': CreateVM_Task, 'duration_secs': 0.356138} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.246027] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 962.246668] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.246856] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.247273] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 962.248193] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-012f02ff-42b7-406b-9974-4f2065ef197d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.252505] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 962.252505] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52699e5d-1801-41bf-29a9-452d66d9b5eb" [ 962.252505] env[61898]: _type = "Task" [ 962.252505] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.260346] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52699e5d-1801-41bf-29a9-452d66d9b5eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.294216] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.468467] env[61898]: DEBUG nova.network.neutron [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updated VIF entry in instance network info cache for port fa47b33a-e279-408b-bcd7-9165ff102179. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.468991] env[61898]: DEBUG nova.network.neutron [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.573203] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.573686] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.573884] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.574178] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.574373] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.577248] env[61898]: INFO nova.compute.manager [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Terminating instance [ 962.609298] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 962.617858] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a24c93f0-f8c7-4a35-a29a-bc8f80562804 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 36.165s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.765471] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52699e5d-1801-41bf-29a9-452d66d9b5eb, 'name': SearchDatastore_Task, 'duration_secs': 0.013619} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.766135] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.766389] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 962.766647] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 962.766817] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.767011] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 962.767288] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f98124ee-189c-4d93-9bea-92c23df08911 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.775402] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 962.775593] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 962.776325] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e33513f-514f-4c4b-a217-4ea9569378de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.781545] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 962.781545] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525b81ee-987f-8edf-e992-23a074b44ac9" [ 962.781545] env[61898]: _type = "Task" [ 962.781545] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.791572] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525b81ee-987f-8edf-e992-23a074b44ac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.796324] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.976315] env[61898]: DEBUG oslo_concurrency.lockutils [req-e3f99eb6-fd78-458d-9f04-8066ec9a42cf req-947e3157-d485-43c2-b55c-1a540fbbbe9b service nova] Releasing lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.081279] env[61898]: DEBUG nova.compute.manager [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 963.081607] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.082690] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e47350-133c-4973-a206-4e8c086afc8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.093841] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.094345] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb7c3fcd-923f-4b86-beec-a50c8b236115 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.103116] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 963.103116] env[61898]: value = "task-1241074" [ 963.103116] env[61898]: _type = "Task" [ 963.103116] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.114944] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241074, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.116787] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 963.116975] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.435s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.117259] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.445s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.117445] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.119499] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.253s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.119719] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.121613] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.412s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.123132] env[61898]: INFO nova.compute.claims [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.150931] env[61898]: INFO nova.scheduler.client.report [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted allocations for instance 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1 [ 963.153312] env[61898]: INFO nova.scheduler.client.report [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 
tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance a2ceed2d-be5e-4baa-b2a7-1116812e775d [ 963.301193] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525b81ee-987f-8edf-e992-23a074b44ac9, 'name': SearchDatastore_Task, 'duration_secs': 0.020975} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.309388] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.309976] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aac04d96-3a32-47c8-bf30-1d00eeb4414f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.316734] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 963.316734] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e59f-c26e-3f49-6dc0-20cb0cedfc9b" [ 963.316734] env[61898]: _type = "Task" [ 963.316734] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.327703] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e59f-c26e-3f49-6dc0-20cb0cedfc9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.511242] env[61898]: DEBUG nova.compute.manager [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Received event network-changed-508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 963.511242] env[61898]: DEBUG nova.compute.manager [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Refreshing instance network info cache due to event network-changed-508f7b5a-a3ef-4688-9918-45d566ba903a. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 963.511242] env[61898]: DEBUG oslo_concurrency.lockutils [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] Acquiring lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.511677] env[61898]: DEBUG oslo_concurrency.lockutils [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] Acquired lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.512421] env[61898]: DEBUG nova.network.neutron [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Refreshing network info cache for port 508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.614817] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241074, 'name': PowerOffVM_Task, 'duration_secs': 0.17601} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.615118] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.615289] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.615591] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-310d6638-618c-4594-917e-9a52570a67f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.665522] env[61898]: DEBUG oslo_concurrency.lockutils [None req-559d95c4-fee9-4857-bb52-17bcb0a09260 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.843s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.666663] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9ff90a8b-4164-4ef9-be36-8e93e14562c6 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "a2ceed2d-be5e-4baa-b2a7-1116812e775d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.398s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.718592] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.718887] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.719087] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleting the datastore file [datastore2] 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.719862] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03666e05-d137-4407-8bda-1c71fbaf1b94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.726511] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 963.726511] env[61898]: value = "task-1241076" [ 963.726511] env[61898]: _type = "Task" [ 963.726511] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.734967] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.798798] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.827629] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5280e59f-c26e-3f49-6dc0-20cb0cedfc9b, 'name': SearchDatastore_Task, 'duration_secs': 0.010046} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.827973] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.828848] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 963.828848] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7422444a-0270-4567-aa46-5d5c52ce0aed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.836323] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 963.836323] env[61898]: value = "task-1241077" [ 963.836323] env[61898]: _type = "Task" [ 963.836323] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.844742] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.238951] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241076, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.300280] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.334317] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f8a60a-f88c-45e8-9b1e-9dcd4c1c889b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.348430] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76baed33-2961-4026-bee3-fb579aa6c738 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.351944] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241077, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.379890] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dd8203-e4bc-4360-ba38-3c9798eaebb1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.391150] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ccb088-2819-4b99-8330-9ef171273269 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.404978] env[61898]: DEBUG nova.compute.provider_tree [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.470955] env[61898]: DEBUG nova.network.neutron [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updated VIF entry in instance network info cache for port 508f7b5a-a3ef-4688-9918-45d566ba903a. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.471538] env[61898]: DEBUG nova.network.neutron [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating instance_info_cache with network_info: [{"id": "508f7b5a-a3ef-4688-9918-45d566ba903a", "address": "fa:16:3e:75:1f:00", "network": {"id": "a7782ec9-c3cb-41be-b52c-f40deb036970", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-870279198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98ca09762c2e4b119437aa5b1a36e133", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap508f7b5a-a3", "ovs_interfaceid": "508f7b5a-a3ef-4688-9918-45d566ba903a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.656918] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.657253] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.657484] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.657713] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.657931] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.660552] env[61898]: INFO nova.compute.manager [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Terminating instance [ 964.726757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "cd1335b7-78b7-4cea-add7-dd69736067b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.726757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.726757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.726757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.726757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.727590] env[61898]: INFO nova.compute.manager [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Terminating instance [ 964.739316] env[61898]: DEBUG oslo_vmware.api [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241076, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.6326} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.743503] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 964.743503] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 964.743503] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.743503] env[61898]: INFO nova.compute.manager [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 1.66 seconds to destroy the instance on the hypervisor. [ 964.743503] env[61898]: DEBUG oslo.service.loopingcall [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 964.743503] env[61898]: DEBUG nova.compute.manager [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 964.743503] env[61898]: DEBUG nova.network.neutron [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 964.801980] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 18%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.846529] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.889088} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.846918] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 964.847561] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 964.847561] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56e64947-a78f-4558-bde4-4807ce5c322e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.855167] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 964.855167] env[61898]: value = "task-1241078" [ 964.855167] env[61898]: _type = "Task" [ 964.855167] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.862749] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241078, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.937956] env[61898]: ERROR nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [req-a07d0575-3fae-42f4-ada0-89c443756209] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a07d0575-3fae-42f4-ada0-89c443756209"}]} [ 964.963254] env[61898]: DEBUG nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 964.974359] env[61898]: DEBUG oslo_concurrency.lockutils [req-23f3ce02-9de8-4798-af26-28b532442bc5 req-80c88841-a725-472e-9dac-c733d84e3586 service nova] Releasing lock "refresh_cache-622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.986361] env[61898]: DEBUG nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 964.986666] env[61898]: DEBUG nova.compute.provider_tree [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 965.001157] env[61898]: DEBUG nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 965.018649] env[61898]: DEBUG nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 965.018925] env[61898]: DEBUG 
nova.compute.provider_tree [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 118 to 119 during operation: update_traits {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 965.165616] env[61898]: DEBUG nova.compute.manager [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 965.165842] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.166776] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5270eaa-5396-4900-b14d-e99b4afb92f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.177768] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.178157] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab9aa8fa-eb07-4b23-b86a-c96db43f448e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.188076] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 965.188076] env[61898]: value = "task-1241079" [ 965.188076] env[61898]: _type = "Task" [ 965.188076] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.196312] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241079, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.201084] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212924d0-998b-4374-9e08-2e0e91c8a8c3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.207919] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c42259-526a-42d6-a3eb-8b5f99414a61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.242433] env[61898]: DEBUG nova.compute.manager [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 965.243342] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.243747] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5644accf-42c7-4423-bed2-2844782e2483 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.247098] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bd5d64-2c9c-4676-aab2-b2f23a886421 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.257214] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bc8de0-12c2-4ed6-9787-41f6f5201b1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.261076] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.261324] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0900a91b-7018-4894-8116-401463185d68 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.274098] env[61898]: DEBUG nova.compute.provider_tree [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.275302] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 965.275302] env[61898]: value = "task-1241080" [ 965.275302] env[61898]: _type = "Task" [ 965.275302] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.282806] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241080, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.300600] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.364561] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241078, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081232} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.365024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 965.365726] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff42ad01-b019-423e-8d2d-4d14ebfaa0f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.389032] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 965.390971] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e42f03b7-bc21-4355-b317-05427009c499 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.407237] env[61898]: DEBUG nova.network.neutron [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.415433] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 965.415433] env[61898]: value = "task-1241081" [ 965.415433] env[61898]: _type = "Task" [ 965.415433] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.428090] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.429027] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896126da-0c88-441e-baff-63a027e420e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.437515] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Suspending the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 965.437944] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-25216f45-473e-4b55-a818-81d3f3979a79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.445781] env[61898]: DEBUG oslo_vmware.api [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 965.445781] env[61898]: value = "task-1241082" [ 965.445781] env[61898]: _type = "Task" [ 965.445781] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.455630] env[61898]: DEBUG oslo_vmware.api [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241082, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.497363] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "7eb0d534-90c8-439d-a894-3f03151ac74b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.497547] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.550849] env[61898]: DEBUG nova.compute.manager [req-d4334527-4fdb-4b09-be0d-4bd8cf249e78 req-30f982d6-0d65-4970-ae6c-215cf12be9e8 service nova] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Received event network-vif-deleted-9f33f2c4-4626-4230-90ea-e91c5f0da486 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 965.698014] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.777530] env[61898]: DEBUG nova.scheduler.client.report [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 965.803620] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241080, 'name': PowerOffVM_Task, 'duration_secs': 0.374587} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.804984] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.805418] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.806240] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-effd1214-758b-40ba-8535-5ea31c1d8746 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.814491] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.880993] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.881810] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.881810] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore1] cd1335b7-78b7-4cea-add7-dd69736067b0 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.881810] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da6108a9-3ee7-4a25-b121-dbd41a260d70 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.890247] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 965.890247] env[61898]: value = "task-1241084" [ 965.890247] env[61898]: _type = "Task" [ 965.890247] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.899170] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.910942] env[61898]: INFO nova.compute.manager [-] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Took 1.17 seconds to deallocate network for instance. [ 965.924479] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.957611] env[61898]: DEBUG oslo_vmware.api [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241082, 'name': SuspendVM_Task} progress is 62%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.001337] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 966.199334] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.291696] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.170s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.295124] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 966.296384] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.319s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.297972] env[61898]: INFO nova.compute.claims [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.313157] env[61898]: DEBUG oslo_vmware.api [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241066, 'name': ReconfigVM_Task, 'duration_secs': 6.901021} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.313157] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.313720] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Reconfigured VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 966.406027] env[61898]: DEBUG oslo_vmware.api [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221144} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.406027] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.406027] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.406027] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.406027] env[61898]: INFO nova.compute.manager [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 966.406027] env[61898]: DEBUG oslo.service.loopingcall [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.406027] env[61898]: DEBUG nova.compute.manager [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 966.406027] env[61898]: DEBUG nova.network.neutron [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.420092] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.428161] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241081, 'name': ReconfigVM_Task, 'duration_secs': 0.728435} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.428721] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfigured VM instance instance-00000060 to attach disk [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.429550] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab18795c-4f2c-4995-9270-55357f6fe171 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.439024] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 966.439024] env[61898]: value = "task-1241085" [ 966.439024] env[61898]: _type = "Task" [ 966.439024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.446792] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241085, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.454987] env[61898]: DEBUG oslo_vmware.api [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241082, 'name': SuspendVM_Task, 'duration_secs': 0.725893} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.455548] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Suspended the VM {{(pid=61898) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 966.455941] env[61898]: DEBUG nova.compute.manager [None req-6f8713e1-a0c4-4284-92b5-4ccd70d39ce3 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 966.457085] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa739f5-1c8f-4b78-87ca-955381ca849e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.530766] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.701445] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241079, 'name': PowerOffVM_Task, 'duration_secs': 1.111682} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.701723] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 966.701895] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 966.702200] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-264781e6-38a0-48df-ab3d-378955d0c673 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.767372] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.767635] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.767813] env[61898]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore1] cf428138-4d0d-43bf-a654-06a62a82c9a1 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.768109] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-963385ea-7bb8-43fa-9be7-b1ca4688ab93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.775796] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 966.775796] env[61898]: value = "task-1241087" [ 966.775796] env[61898]: _type = "Task" [ 966.775796] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.784977] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.803543] env[61898]: DEBUG nova.compute.utils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.805080] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 966.805275] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 966.957324] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241085, 'name': Rename_Task} progress is 99%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.093163] env[61898]: DEBUG nova.policy [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '55d6a8ba4298448ab1c60e40ded4f713', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f1a7a5308254182b0a08cbe226ab393', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 967.287760] env[61898]: DEBUG oslo_vmware.api [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136158} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.288067] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.288269] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.288529] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.288704] env[61898]: INFO nova.compute.manager [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Took 2.12 seconds to destroy the instance on the hypervisor. [ 967.288962] env[61898]: DEBUG oslo.service.loopingcall [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
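
The "Waiting for function ..._deallocate_network_with_retries to return" entry comes from an oslo.service looping call that retries the Neutron deallocation until it succeeds. A rough sketch of that retry pattern (not nova's exact helper, which uses a back-off variant; `deallocate` is an assumed callable):

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate, context, instance):
        # Run `deallocate` on a fixed interval; raising LoopingCallDone stops
        # the loop and its retvalue becomes the result of waiter.wait().
        def _try_once():
            try:
                deallocate(context, instance)
            except Exception:
                return  # let the looping call run us again after the interval
            raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        waiter = timer.start(interval=30, initial_delay=None)
        waiter.wait()

A production version would bound the number of retries instead of looping indefinitely.
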
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.289178] env[61898]: DEBUG nova.compute.manager [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 967.289276] env[61898]: DEBUG nova.network.neutron [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 967.310909] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 967.459143] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241085, 'name': Rename_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.603510] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb21208-34e5-48a7-a9ef-5217810ebf03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.616884] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70250ef9-f6a1-454c-a737-7d21df15fdb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.647817] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14dba24-415c-448c-aa04-d18203943391 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.656604] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342bbf98-8aea-4159-aacd-8f44e88a2def {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.673174] env[61898]: DEBUG nova.compute.provider_tree [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.701758] env[61898]: DEBUG nova.compute.manager [req-e354269d-c657-4ca5-9769-58639a22c55a req-f4dfaf36-ec19-4ec1-a231-64ef3df17926 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Received event network-vif-deleted-53aab5ac-41d3-4125-8cee-3a013242a542 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 967.701998] env[61898]: INFO nova.compute.manager [req-e354269d-c657-4ca5-9769-58639a22c55a req-f4dfaf36-ec19-4ec1-a231-64ef3df17926 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Neutron deleted interface 53aab5ac-41d3-4125-8cee-3a013242a542; detaching it from the instance and deleting it from the info cache [ 967.702195] env[61898]: DEBUG nova.network.neutron [req-e354269d-c657-4ca5-9769-58639a22c55a 
req-f4dfaf36-ec19-4ec1-a231-64ef3df17926 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.729249] env[61898]: DEBUG nova.compute.manager [req-9e2a186c-fe75-45ca-b49a-e20a62c59db0 req-686e622e-1652-4ca3-983e-c3cda2456154 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Received event network-vif-deleted-dfa99f09-26b3-43d9-8c91-58911fb6fcd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 967.729249] env[61898]: INFO nova.compute.manager [req-9e2a186c-fe75-45ca-b49a-e20a62c59db0 req-686e622e-1652-4ca3-983e-c3cda2456154 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Neutron deleted interface dfa99f09-26b3-43d9-8c91-58911fb6fcd9; detaching it from the instance and deleting it from the info cache [ 967.729249] env[61898]: DEBUG nova.network.neutron [req-9e2a186c-fe75-45ca-b49a-e20a62c59db0 req-686e622e-1652-4ca3-983e-c3cda2456154 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.844795] env[61898]: DEBUG nova.network.neutron [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.951257] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241085, 'name': Rename_Task, 'duration_secs': 1.146878} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.951257] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.951774] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-451b8179-0105-4e12-907e-24e2d876f912 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.954192] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Successfully created port: db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 967.961128] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 967.961128] env[61898]: value = "task-1241088" [ 967.961128] env[61898]: _type = "Task" [ 967.961128] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.971811] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241088, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.034986] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.034986] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.080520] env[61898]: DEBUG nova.network.neutron [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.176840] env[61898]: DEBUG nova.scheduler.client.report [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 968.204897] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5057c50-df75-477b-bd14-8dfb9b4681f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.215274] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcc28d5-8dbe-47bb-8bb5-e3a0c292217d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.231692] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88b40102-6d49-47b2-b77c-7c99b4756a84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.250409] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1e4d23-c10f-4483-855f-dd4ba0a502b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.262914] env[61898]: DEBUG nova.compute.manager [req-e354269d-c657-4ca5-9769-58639a22c55a 
req-f4dfaf36-ec19-4ec1-a231-64ef3df17926 service nova] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Detach interface failed, port_id=53aab5ac-41d3-4125-8cee-3a013242a542, reason: Instance cd1335b7-78b7-4cea-add7-dd69736067b0 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 968.280813] env[61898]: DEBUG nova.compute.manager [req-9e2a186c-fe75-45ca-b49a-e20a62c59db0 req-686e622e-1652-4ca3-983e-c3cda2456154 service nova] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Detach interface failed, port_id=dfa99f09-26b3-43d9-8c91-58911fb6fcd9, reason: Instance cf428138-4d0d-43bf-a654-06a62a82c9a1 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 968.326339] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 968.345251] env[61898]: INFO nova.compute.manager [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Resuming [ 968.345877] env[61898]: DEBUG nova.objects.instance [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'flavor' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.347531] env[61898]: INFO nova.compute.manager [-] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Took 1.94 seconds to deallocate network for instance. 
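
Every "Task: {...} progress is N%" / "completed successfully" pair in this log is oslo.vmware polling the TaskInfo object of a vCenter task. session.wait_for_task() does this internally; a simplified manual poll under the same assumptions (`session` is a VMwareAPISession, `task_ref` a task managed-object reference) looks roughly like:

    import time
    from oslo_vmware import vim_util

    def poll_task(session, task_ref, interval=0.5):
        # Read TaskInfo until the task reaches a terminal state; this is
        # roughly what the _poll_task lines in the log correspond to.
        while True:
            task_info = session.invoke_api(vim_util, 'get_object_property',
                                           session.vim, task_ref, 'info')
            if task_info.state == 'success':
                return task_info.result
            if task_info.state == 'error':
                raise RuntimeError(task_info.error.localizedMessage)
            time.sleep(interval)

In practice session.wait_for_task() should be used instead, since it also handles retries and progress logging.
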
[ 968.366790] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 968.366790] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 968.366790] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.366790] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 968.367138] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.367263] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 968.367524] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 968.367698] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 968.367879] 
env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 968.368040] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 968.368225] env[61898]: DEBUG nova.virt.hardware [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 968.369436] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da7cd82-92e2-482e-b8cf-be9044d0448d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.379501] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36aa490-9f98-40e5-924f-9da0cb87cd26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.479139] env[61898]: DEBUG oslo_vmware.api [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241088, 'name': PowerOnVM_Task, 'duration_secs': 0.471127} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.479440] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.479675] env[61898]: INFO nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Took 10.46 seconds to spawn the instance on the hypervisor. 
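
The nova.virt.hardware block above reduces to a small search: with no limits or preferences from the flavor or image (0:0:0) and a single vCPU, the only viable topology is 1 socket x 1 core x 1 thread. A toy version of that selection (not nova's full implementation) is:

    import itertools
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals
        # vcpus and which stay within the given limits.
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= max_sockets \
                    and c <= max_cores and t <= max_threads:
                yield Topology(s, c, t)

    print(list(possible_topologies(1)))
    # [Topology(sockets=1, cores=1, threads=1)]
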
[ 968.479882] env[61898]: DEBUG nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 968.480762] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65efee6-4fd1-4bb3-9adc-305ed32206df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.538095] env[61898]: INFO nova.compute.manager [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Detaching volume b04c905b-4035-4be9-9960-21b687a5e2a9 [ 968.563797] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.564081] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.564385] env[61898]: DEBUG nova.network.neutron [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.583490] env[61898]: INFO nova.compute.manager [-] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Took 1.29 seconds to deallocate network for instance. [ 968.595045] env[61898]: INFO nova.virt.block_device [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Attempting to driver detach volume b04c905b-4035-4be9-9960-21b687a5e2a9 from mountpoint /dev/sdb [ 968.595482] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 968.595813] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267683', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'name': 'volume-b04c905b-4035-4be9-9960-21b687a5e2a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '070bc0cc-ff77-48b8-bd08-f17fe69e25af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'serial': 'b04c905b-4035-4be9-9960-21b687a5e2a9'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 968.597176] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e00777f-e5e1-44e2-91d1-9f3d3cd0b399 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.619405] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b4cde6-11b1-4277-b10a-7ee29859a3e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.626759] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0036ee8-ac89-4413-a812-cdfd1f97b846 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.650236] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae02f4b-f379-4023-b010-a504b6851d8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.666091] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] The volume has not been displaced from its original location: [datastore1] volume-b04c905b-4035-4be9-9960-21b687a5e2a9/volume-b04c905b-4035-4be9-9960-21b687a5e2a9.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 968.671857] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 968.671857] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbea8dba-5a5b-4cae-b28a-a0ca9d817b9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.685519] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.686028] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 968.688951] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.271s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.689193] env[61898]: DEBUG nova.objects.instance [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'resources' on Instance uuid 7c6aad92-6e91-48fc-89ae-5ee4c89f449c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.693578] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 968.693578] env[61898]: value = "task-1241089" [ 968.693578] env[61898]: _type = "Task" [ 968.693578] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.703411] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241089, 'name': ReconfigVM_Task} progress is 6%. 
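
The recurring "Acquiring lock ... / Lock ... acquired by ... waited N s / released ... held N s" triplets are emitted by oslo.concurrency, which the resource tracker uses to serialize its compute_resources critical sections. A minimal sketch of the same pattern (the lock name, functions and `tracker` object are illustrative, not nova's code):

    from oslo_concurrency import lockutils

    # Serialize claims against a shared in-process lock named
    # "compute_resources"; acquisition and release are logged at DEBUG,
    # much like the lockutils lines above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(tracker, instance):
        tracker.claim(instance)

    # The context-manager form is equivalent:
    def update_usage(tracker, instance):
        with lockutils.lock('compute_resources'):
            tracker.update(instance)
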
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.859890] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.006243] env[61898]: INFO nova.compute.manager [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Took 29.06 seconds to build instance. [ 969.102163] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.191959] env[61898]: DEBUG nova.compute.utils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 969.198103] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 969.199155] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.217860] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241089, 'name': ReconfigVM_Task, 'duration_secs': 0.4204} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.218283] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 969.224113] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ce35708-6732-4ccc-bece-04117776cef2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.247270] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 969.247270] env[61898]: value = "task-1241090" [ 969.247270] env[61898]: _type = "Task" [ 969.247270] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.258208] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241090, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.261946] env[61898]: DEBUG nova.policy [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee555b0896f748d1886e7037911db84f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c2e835a924c438287e7626c34c2fb05', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 969.307322] env[61898]: INFO nova.network.neutron [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Port 1b2175da-a7e5-4786-a4f6-780fb83f447c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
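
"Reconfiguring VM instance ... to detach disk 2001" corresponds to a ReconfigVM_Task whose config spec removes one virtual disk device without deleting the backing volume vmdk. A hedged sketch of building that spec through the session's SOAP client factory; `device` is assumed to be the VirtualDisk already located on the VM:

    def detach_disk_from_vm(session, vm_ref, device, destroy_disk=False):
        # Build a VirtualMachineConfigSpec that removes a single device.
        client_factory = session.vim.client.factory
        config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        device_spec = client_factory.create('ns0:VirtualDeviceConfigSpec')
        device_spec.operation = 'remove'
        if destroy_disk:
            # Also delete the backing file; a volume detach leaves it in place.
            device_spec.fileOperation = 'destroy'
        device_spec.device = device
        config_spec.deviceChange = [device_spec]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)
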
[ 969.307785] env[61898]: DEBUG nova.network.neutron [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.355732] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.356073] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.356377] env[61898]: DEBUG nova.network.neutron [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.478047] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b631710-8148-4b4b-aac0-4f2f0fe47b66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.486207] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d66e5a-84c2-4d90-9049-825f01c02646 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.519786] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2f95c96-a530-4b17-b59c-838ffcad291a tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.595s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.520819] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44694804-235f-4a20-914b-05b10a7f2e86 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.528938] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f408576b-b686-4de4-a7bc-4551bdc9e8d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.543275] env[61898]: DEBUG nova.compute.provider_tree [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.641776] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.642187] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.642719] env[61898]: DEBUG nova.objects.instance [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'flavor' on Instance uuid 4522f4ef-c8f6-4fe1-acd5-796f87f22839 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.651183] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "587c9997-3b6d-4654-9cf3-f181833c0728" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.651425] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.698955] env[61898]: DEBUG nova.compute.manager [None 
req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 969.757469] env[61898]: DEBUG oslo_vmware.api [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241090, 'name': ReconfigVM_Task, 'duration_secs': 0.216705} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.757788] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267683', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'name': 'volume-b04c905b-4035-4be9-9960-21b687a5e2a9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '070bc0cc-ff77-48b8-bd08-f17fe69e25af', 'attached_at': '', 'detached_at': '', 'volume_id': 'b04c905b-4035-4be9-9960-21b687a5e2a9', 'serial': 'b04c905b-4035-4be9-9960-21b687a5e2a9'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 969.810844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.853517] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Successfully created port: 4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.046825] env[61898]: DEBUG nova.scheduler.client.report [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 970.115498] env[61898]: DEBUG nova.network.neutron [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": 
"br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.153344] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 970.166196] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Successfully updated port: db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.253974] env[61898]: DEBUG nova.objects.instance [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'pci_requests' on Instance uuid 4522f4ef-c8f6-4fe1-acd5-796f87f22839 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.315789] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a207f07b-8292-4817-a06b-beaa2f0d0201 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-1b2175da-a7e5-4786-a4f6-780fb83f447c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.618s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.329640] env[61898]: DEBUG nova.objects.instance [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'flavor' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.370251] env[61898]: DEBUG nova.compute.manager [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 970.370487] env[61898]: DEBUG nova.compute.manager 
[req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing instance network info cache due to event network-changed-62f1251d-f84b-4c28-ab74-971fef0d640f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 970.370921] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Acquiring lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.370921] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Acquired lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.370921] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Refreshing network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.386761] env[61898]: DEBUG nova.compute.manager [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Received event network-vif-plugged-db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 970.386761] env[61898]: DEBUG oslo_concurrency.lockutils [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] Acquiring lock "06c894a2-9236-4534-922f-4255c6cf0531-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.387119] env[61898]: DEBUG oslo_concurrency.lockutils [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] Lock "06c894a2-9236-4534-922f-4255c6cf0531-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.387119] env[61898]: DEBUG oslo_concurrency.lockutils [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] Lock "06c894a2-9236-4534-922f-4255c6cf0531-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.387333] env[61898]: DEBUG nova.compute.manager [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] No waiting events found dispatching network-vif-plugged-db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 970.387507] env[61898]: WARNING nova.compute.manager [req-560cda7d-ecb7-4f15-8e5b-b9d3ef8fe48d req-066c9484-cf04-489e-bb1a-499d31a32a2c service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Received unexpected event 
network-vif-plugged-db84f935-3ee0-4a20-b18a-f05801372bd9 for instance with vm_state building and task_state spawning. [ 970.457017] env[61898]: DEBUG nova.compute.manager [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Stashing vm_state: active {{(pid=61898) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 970.550199] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.552666] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.022s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.554683] env[61898]: INFO nova.compute.claims [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 970.572344] env[61898]: INFO nova.scheduler.client.report [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted allocations for instance 7c6aad92-6e91-48fc-89ae-5ee4c89f449c [ 970.618050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.619106] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e463bb49-01af-47b2-9a70-219dd66e1b80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.627206] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Resuming the VM {{(pid=61898) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 970.627671] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05428b3a-836d-43f1-9ae7-835d66f91e91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.634275] env[61898]: DEBUG oslo_vmware.api [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 970.634275] env[61898]: value = "task-1241091" [ 970.634275] env[61898]: _type = "Task" [ 970.634275] env[61898]: } to complete. 
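
The scheduler report lines show the provider inventory that claims such as the one for instance 7eb0d534-... are made against. Placement roughly treats (total - reserved) * allocation_ratio as the capacity it can allocate from, so the logged inventory advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. A small worked example of that arithmetic:

    # Capacity math for the inventory dict logged above (values copied from
    # the log; the formula is the usual placement capacity calculation).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
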
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.641954] env[61898]: DEBUG oslo_vmware.api [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.670068] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.670192] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquired lock "refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.670326] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 970.676303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.709962] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 970.736228] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.736588] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.736676] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.736863] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.737024] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.737182] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.737395] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.737586] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.737800] env[61898]: DEBUG nova.virt.hardware [None 
req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.737977] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.738178] env[61898]: DEBUG nova.virt.hardware [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.739077] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4148fa-6531-47b8-bc56-647f371852a7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.747219] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545aef39-25dd-4e28-a72e-eaed5b6a3629 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.762187] env[61898]: DEBUG nova.objects.base [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Object Instance<4522f4ef-c8f6-4fe1-acd5-796f87f22839> lazy-loaded attributes: flavor,pci_requests {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 970.762456] env[61898]: DEBUG nova.network.neutron [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 970.833791] env[61898]: DEBUG nova.policy [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '080b70de4bd5465e8e696943b90f4ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e2c65efa327e403284ad2e78b3c7b7d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 970.978361] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.081520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-26db1ff5-b089-4602-ae72-59f8f1883a48 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "7c6aad92-6e91-48fc-89ae-5ee4c89f449c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.508s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.103097] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updated VIF entry in instance network info cache for port 62f1251d-f84b-4c28-ab74-971fef0d640f. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 971.103553] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [{"id": "62f1251d-f84b-4c28-ab74-971fef0d640f", "address": "fa:16:3e:27:01:3e", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62f1251d-f8", "ovs_interfaceid": "62f1251d-f84b-4c28-ab74-971fef0d640f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.144794] env[61898]: DEBUG oslo_vmware.api [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241091, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.202083] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.339905] env[61898]: DEBUG oslo_concurrency.lockutils [None req-2981055f-925d-46d0-b9e8-c3284c24d5e7 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.306s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.440034] env[61898]: DEBUG nova.network.neutron [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Updating instance_info_cache with network_info: [{"id": "db84f935-3ee0-4a20-b18a-f05801372bd9", "address": "fa:16:3e:d6:07:e0", "network": {"id": "cc3313cf-b508-4050-8a32-ad85e5dffe2a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-194406996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1a7a5308254182b0a08cbe226ab393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb84f935-3e", "ovs_interfaceid": "db84f935-3ee0-4a20-b18a-f05801372bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.607154] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Releasing lock "refresh_cache-bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.607400] env[61898]: DEBUG nova.compute.manager [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-changed-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 971.607607] env[61898]: DEBUG nova.compute.manager [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing instance network info cache due to event network-changed-4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 971.607958] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.608217] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.608515] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.647424] env[61898]: DEBUG oslo_vmware.api [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241091, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.775618] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Successfully updated port: 4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 971.816189] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10edc79-8add-4caf-ba3e-05e468f42196 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.824520] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2928fc59-3fae-4f18-911b-da9c98318ae9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.861101] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45af82d5-d6d4-4cea-9933-e37be7820df0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.874171] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef85d5d5-23a9-43e8-8e08-c0072dbf6dd4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.890669] env[61898]: DEBUG nova.compute.provider_tree [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.943274] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Releasing lock 
"refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.943937] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Instance network_info: |[{"id": "db84f935-3ee0-4a20-b18a-f05801372bd9", "address": "fa:16:3e:d6:07:e0", "network": {"id": "cc3313cf-b508-4050-8a32-ad85e5dffe2a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-194406996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1a7a5308254182b0a08cbe226ab393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb84f935-3e", "ovs_interfaceid": "db84f935-3ee0-4a20-b18a-f05801372bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 971.944793] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:07:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db84f935-3ee0-4a20-b18a-f05801372bd9', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 971.958038] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Creating folder: Project (7f1a7a5308254182b0a08cbe226ab393). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 971.958358] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51fdf83a-6c1d-4f29-ab9f-0c5c37484898 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.970745] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Created folder: Project (7f1a7a5308254182b0a08cbe226ab393) in parent group-v267550. 
[ 971.970991] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Creating folder: Instances. Parent ref: group-v267702. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 971.971259] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a64c1bea-5585-438b-85a4-2dbe15d1bede {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.981529] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Created folder: Instances in parent group-v267702. [ 971.981529] env[61898]: DEBUG oslo.service.loopingcall [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 971.981812] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 971.982066] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc5ceadf-6949-4231-8510-bec7053d7d54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.002619] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.002619] env[61898]: value = "task-1241094" [ 972.002619] env[61898]: _type = "Task" [ 972.002619] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.011646] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241094, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.145187] env[61898]: DEBUG oslo_vmware.api [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241091, 'name': PowerOnVM_Task, 'duration_secs': 1.11418} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.145479] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Resumed the VM {{(pid=61898) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 972.145692] env[61898]: DEBUG nova.compute.manager [None req-93bd874f-33cd-4cec-a7bb-557c3b973065 tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 972.146506] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711057eb-d77c-460a-9eda-85f78a5ccd4d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.285251] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.285510] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.285696] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.348772] env[61898]: DEBUG nova.network.neutron [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Successfully updated port: 1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.369021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.369283] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.369516] env[61898]: DEBUG oslo_concurrency.lockutils 
[None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.369730] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.369908] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.375644] env[61898]: INFO nova.compute.manager [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Terminating instance [ 972.394253] env[61898]: DEBUG nova.scheduler.client.report [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 972.413076] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updated VIF entry in instance network info cache for port 4bed7107-cc7d-431f-a835-84a51f188455. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.413494] env[61898]: DEBUG nova.network.neutron [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.513075] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241094, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.837260] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 972.852224] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.879525] env[61898]: DEBUG nova.compute.manager [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 972.879643] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.880512] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab37016-adb0-48bb-849d-c40b6d94f9eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.888531] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.888811] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf35a2f7-4f2d-490a-ae6b-0448929847f6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.897306] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 972.897306] env[61898]: value = "task-1241095" [ 972.897306] env[61898]: _type = "Task" [ 972.897306] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.901426] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.349s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.901940] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 972.904815] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.045s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.905088] env[61898]: DEBUG nova.objects.instance [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid cd1335b7-78b7-4cea-add7-dd69736067b0 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.914636] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.915781] env[61898]: DEBUG oslo_concurrency.lockutils [req-60d47b0d-e68d-41c9-9087-f0217492262f req-02508f99-6519-4407-bde1-5f62b146671a service nova] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.916184] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.916416] env[61898]: DEBUG nova.network.neutron [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 972.937476] env[61898]: DEBUG nova.compute.manager [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 972.937706] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.937929] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.938116] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 
req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.938292] env[61898]: DEBUG nova.compute.manager [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] No waiting events found dispatching network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 972.938532] env[61898]: WARNING nova.compute.manager [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received unexpected event network-vif-plugged-1b2175da-a7e5-4786-a4f6-780fb83f447c for instance with vm_state active and task_state None. [ 972.938660] env[61898]: DEBUG nova.compute.manager [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-changed-1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 972.938822] env[61898]: DEBUG nova.compute.manager [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing instance network info cache due to event network-changed-1b2175da-a7e5-4786-a4f6-780fb83f447c. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 972.939038] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.959500] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Received event network-changed-db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 972.959707] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Refreshing instance network info cache due to event network-changed-db84f935-3ee0-4a20-b18a-f05801372bd9. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 972.959921] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Acquiring lock "refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.960328] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Acquired lock "refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.960468] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Refreshing network info cache for port db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 973.013605] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241094, 'name': CreateVM_Task, 'duration_secs': 0.653115} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.013791] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 973.014432] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.014746] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.014947] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 973.015237] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-654a66d9-cf5c-482e-88ab-e76e0daec812 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.020190] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 973.020190] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02c3b-d71f-6040-b7b3-92bf8c94b748" [ 973.020190] env[61898]: _type = "Task" [ 973.020190] env[61898]: } to 
complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.028783] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02c3b-d71f-6040-b7b3-92bf8c94b748, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.044438] env[61898]: DEBUG nova.network.neutron [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.352873] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.352873] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.379540] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.379791] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task 
ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.409174] env[61898]: DEBUG nova.compute.utils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 973.412647] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241095, 'name': PowerOffVM_Task, 'duration_secs': 0.460262} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.413886] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 973.414092] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 973.416451] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.416607] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.416891] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f4c9912-4b61-49af-9243-b5ec9d983197 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.453908] env[61898]: WARNING nova.network.neutron [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] 89882853-88ec-48f1-a883-3be9e65f9fd8 already exists in list: networks containing: ['89882853-88ec-48f1-a883-3be9e65f9fd8']. 
ignoring it [ 973.501111] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.501111] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.501111] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.501111] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1acb93f4-e8a0-4a8e-b335-863ca28b96ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.506048] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 973.506048] env[61898]: value = "task-1241097" [ 973.506048] env[61898]: _type = "Task" [ 973.506048] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.519313] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241097, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.532141] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f02c3b-d71f-6040-b7b3-92bf8c94b748, 'name': SearchDatastore_Task, 'duration_secs': 0.025201} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.532275] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.532682] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.533084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.533394] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.533760] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.534164] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc2b74ef-266d-487f-9f40-512af3584558 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.550015] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.550015] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Instance network_info: |[{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 973.550015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:c8:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69f65356-c85e-4b7f-ad28-7c7b5e8cf50c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d73c23b-b607-471d-a628-1fcb200b386c', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 973.556154] env[61898]: DEBUG oslo.service.loopingcall [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.559894] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 973.560682] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.562017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.562017] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c353c48-069d-4a78-b5f5-8087a379ebd3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.577433] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db539ced-4185-402e-be7d-5322bbba035e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.587735] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 973.587735] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb320e-714f-869a-fbc2-d3c3665ad515" [ 973.587735] env[61898]: _type = "Task" [ 973.587735] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.593306] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 973.593306] env[61898]: value = "task-1241098" [ 973.593306] env[61898]: _type = "Task" [ 973.593306] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.603735] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb320e-714f-869a-fbc2-d3c3665ad515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.607918] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241098, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.679113] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2edae2c-4a49-4e8c-8b91-20ce499e8431 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.686770] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468f9fad-ded6-46f0-b3f4-7c169abeec9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.723818] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515215d5-eaa2-42aa-82df-9f5ab92e7879 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.732911] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abdbc33-fac0-4de8-b6da-654fd612a5d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.736556] env[61898]: DEBUG nova.policy [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e2c909f4306477d8fc741ab3aac9d02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e8b71885c83418fb13e216f804ffeeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 973.750360] env[61898]: DEBUG nova.compute.provider_tree [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 973.801837] env[61898]: DEBUG nova.network.neutron [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 
301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "address": "fa:16:3e:c5:73:bd", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b2175da-a7", "ovs_interfaceid": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.854537] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 973.887089] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.887089] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 973.913890] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 974.024900] env[61898]: DEBUG oslo_vmware.api [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250506} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.028415] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.028828] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.029096] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.029718] env[61898]: INFO nova.compute.manager [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Took 1.15 seconds to destroy the instance on the hypervisor. [ 974.029718] env[61898]: DEBUG oslo.service.loopingcall [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.029858] env[61898]: DEBUG nova.compute.manager [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 974.033025] env[61898]: DEBUG nova.network.neutron [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.109151] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb320e-714f-869a-fbc2-d3c3665ad515, 'name': SearchDatastore_Task, 'duration_secs': 0.012082} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.112775] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241098, 'name': CreateVM_Task, 'duration_secs': 0.323253} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.112986] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f57733-028f-4675-9b98-68e4fda22d15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.115471] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 974.116076] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Updated VIF entry in instance network info cache for port db84f935-3ee0-4a20-b18a-f05801372bd9. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 974.116491] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Updating instance_info_cache with network_info: [{"id": "db84f935-3ee0-4a20-b18a-f05801372bd9", "address": "fa:16:3e:d6:07:e0", "network": {"id": "cc3313cf-b508-4050-8a32-ad85e5dffe2a", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-194406996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1a7a5308254182b0a08cbe226ab393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb84f935-3e", "ovs_interfaceid": "db84f935-3ee0-4a20-b18a-f05801372bd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.118070] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.118297] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.118668] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired external semaphore "[datastore2] 
devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 974.119505] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79d18b50-6d37-4a79-b6fd-a51a13c19837 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.122237] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 974.122237] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52755484-1483-0a64-584e-72c1c4585b80" [ 974.122237] env[61898]: _type = "Task" [ 974.122237] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.126995] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 974.126995] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523ecf33-5346-f013-c4a1-9406b8c4492a" [ 974.126995] env[61898]: _type = "Task" [ 974.126995] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.135330] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52755484-1483-0a64-584e-72c1c4585b80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.141382] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523ecf33-5346-f013-c4a1-9406b8c4492a, 'name': SearchDatastore_Task, 'duration_secs': 0.008066} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.141532] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.141758] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 974.142021] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.256858] env[61898]: DEBUG nova.scheduler.client.report [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 974.259991] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Successfully created port: 7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 974.305813] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.307508] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.310022] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.310022] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.310022] env[61898]: DEBUG nova.network.neutron [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Refreshing network info cache for port 1b2175da-a7e5-4786-a4f6-780fb83f447c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.310736] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5319281b-b5aa-46b5-bd9e-61be92ab3618 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.330972] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 974.331880] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 974.331880] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.331880] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 974.331880] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.332124] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 974.332292] env[61898]: DEBUG 
nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 974.332496] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 974.332712] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 974.332926] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 974.333157] env[61898]: DEBUG nova.virt.hardware [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 974.339742] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfiguring VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 974.340839] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d74236e8-18de-4387-a646-bcc2b6770c92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.361537] env[61898]: DEBUG oslo_vmware.api [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 974.361537] env[61898]: value = "task-1241099" [ 974.361537] env[61898]: _type = "Task" [ 974.361537] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.377173] env[61898]: DEBUG oslo_vmware.api [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241099, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.382862] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.619893] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Releasing lock "refresh_cache-06c894a2-9236-4534-922f-4255c6cf0531" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.623096] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Received event network-vif-plugged-4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 974.623096] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.623096] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.623096] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.623096] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] No waiting events found dispatching network-vif-plugged-4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 974.623096] env[61898]: WARNING nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Received unexpected event network-vif-plugged-4d73c23b-b607-471d-a628-1fcb200b386c for instance with vm_state building and task_state spawning. 
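Editor's note: the lock traffic in the records above ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", '"released" ... :: held 0.000s', and the plain Acquiring/Acquired/Releasing lines around the image-cache and refresh_cache locks) comes from oslo.concurrency's two locking interfaces. Below is a minimal sketch of both patterns, not Nova's actual code; the lock names are copied from the log and the function bodies are stand-ins.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources():
    # Stand-in for resource-tracker work.  The decorator's inner() wrapper
    # emits the 'Acquiring lock "compute_resources" by ...',
    # '... acquired ... :: waited 0.000s' and '... "released" ... :: held N s'
    # DEBUG lines (the lockutils.py:402/407/421 frames seen above).
    pass


def refresh_network_cache(instance_uuid):
    # Stand-in for an instance network-info refresh.  The bare context
    # manager emits the plain 'Acquiring lock', 'Acquired lock' and
    # 'Releasing lock' lines (the lockutils.py:310/313/331 frames above).
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass

Both forms use the same in-process named lock; which set of DEBUG lines appears in the log depends only on whether the caller went through the decorator or the context manager.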
[ 974.623096] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Received event network-changed-4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 974.623096] env[61898]: DEBUG nova.compute.manager [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Refreshing instance network info cache due to event network-changed-4d73c23b-b607-471d-a628-1fcb200b386c. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 974.623096] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Acquiring lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.623096] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Acquired lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.623096] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Refreshing network info cache for port 4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.639812] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52755484-1483-0a64-584e-72c1c4585b80, 'name': SearchDatastore_Task, 'duration_secs': 0.009091} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.641127] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.641127] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 06c894a2-9236-4534-922f-4255c6cf0531/06c894a2-9236-4534-922f-4255c6cf0531.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.641127] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.641127] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 974.643257] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd779338-f10b-4915-bec8-c43ac6dce960 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.645353] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8962b6f-5c8f-4469-bb31-e087e4b28eae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.653172] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 974.653172] env[61898]: value = "task-1241100" [ 974.653172] env[61898]: _type = "Task" [ 974.653172] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.654588] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 974.654588] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 974.659838] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e38877e-0e6a-41ad-8c9c-bd8b5771c430 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.665474] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241100, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.666734] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 974.666734] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]523a8ba9-db46-3fb8-963e-1abf5e4205f7" [ 974.666734] env[61898]: _type = "Task" [ 974.666734] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.674618] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523a8ba9-db46-3fb8-963e-1abf5e4205f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.765610] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.859s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.766641] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.665s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.767355] env[61898]: DEBUG nova.objects.instance [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lazy-loading 'resources' on Instance uuid cf428138-4d0d-43bf-a654-06a62a82c9a1 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.791026] env[61898]: INFO nova.scheduler.client.report [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance cd1335b7-78b7-4cea-add7-dd69736067b0 [ 974.872919] env[61898]: DEBUG oslo_vmware.api [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241099, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.927374] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 974.933590] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.933757] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.933931] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 974.960603] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 974.960909] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 974.961132] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.961406] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 974.961586] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 974.961799] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 974.962045] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 974.962221] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 974.962419] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 974.962681] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 974.962933] env[61898]: DEBUG nova.virt.hardware [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 974.963905] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b8e427-68ec-45c9-a885-42987f03132d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.976085] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d02b447-3d72-48f6-a6d8-66a0614ec860 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.995343] env[61898]: DEBUG nova.network.neutron [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.061841] env[61898]: DEBUG nova.network.neutron [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updated VIF entry in instance network info cache for port 1b2175da-a7e5-4786-a4f6-780fb83f447c. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.062357] env[61898]: DEBUG nova.network.neutron [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "address": "fa:16:3e:c5:73:bd", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b2175da-a7", "ovs_interfaceid": "1b2175da-a7e5-4786-a4f6-780fb83f447c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.066808] env[61898]: DEBUG nova.compute.manager [req-b66f7b0e-3803-424c-b979-695601d95c4e req-4ecc3c2d-7ddd-43ca-b997-a750626bf700 service nova] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Received event network-vif-deleted-cd3bd232-226d-4ac0-a9f8-17b93aca92fb {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 975.167671] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241100, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.177814] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]523a8ba9-db46-3fb8-963e-1abf5e4205f7, 'name': SearchDatastore_Task, 'duration_secs': 0.017364} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.178749] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-757a7d9e-c994-43b0-a237-223ac336fc24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.185907] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 975.185907] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c62039-e7ba-ca10-9cdc-75059a28aa0f" [ 975.185907] env[61898]: _type = "Task" [ 975.185907] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.196964] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c62039-e7ba-ca10-9cdc-75059a28aa0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.300404] env[61898]: DEBUG oslo_concurrency.lockutils [None req-93209f83-68d4-4646-9737-0c1e5c7a54ee tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "cd1335b7-78b7-4cea-add7-dd69736067b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.576s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.372878] env[61898]: DEBUG oslo_vmware.api [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241099, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.499346] env[61898]: INFO nova.compute.manager [-] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Took 1.47 seconds to deallocate network for instance. 
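Editor's note: the CreateVM_Task / CopyVirtualDisk_Task records above follow oslo.vmware's invoke-then-poll pattern: a vCenter *_Task SOAP method is called through the API session ("Invoking VirtualDiskManager.CopyVirtualDisk_Task ..."), and wait_for_task() polls the returned task until it completes (the _poll_task frames at api.py:434/444, "progress is N%" ... "completed successfully"). A minimal sketch under the assumption that session is an already-created oslo_vmware.api.VMwareAPISession; the datastore paths are copied from the log, the helper name is illustrative, and the optional datacenter arguments are omitted for brevity.

from oslo_vmware import api


def copy_cached_image(session: api.VMwareAPISession) -> None:
    vim = session.vim
    disk_mgr = vim.service_content.virtualDiskManager
    # Invoking the SOAP method produces the
    # 'Invoking VirtualDiskManager.CopyVirtualDisk_Task' line and returns
    # a task reference (e.g. 'task-1241100' above).
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] devstack-image-cache_base/'
                   'e07a6c11-ab12-4187-81fc-1a28a9d1e65d/'
                   'e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk',
        destName='[datastore2] 06c894a2-9236-4534-922f-4255c6cf0531/'
                 '06c894a2-9236-4534-922f-4255c6cf0531.vmdk')
    # wait_for_task() blocks, logging progress as it polls, and returns
    # once vCenter reports the task succeeded (or raises on error).
    session.wait_for_task(task)

The same pattern covers the SearchDatastore_Task and ReconfigVM_Task entries in this section; only the invoked method and its arguments change.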
[ 975.514509] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a4cd2e-3eac-49f9-85d4-1660e4b0e013 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.526362] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c495fc-5754-454c-8266-53cc11f9b4a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.558940] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updated VIF entry in instance network info cache for port 4d73c23b-b607-471d-a628-1fcb200b386c. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.559340] env[61898]: DEBUG nova.network.neutron [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.563558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d891acae-2e5d-4ec9-92c5-f6257d547047 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.564676] env[61898]: DEBUG oslo_concurrency.lockutils [req-facb4579-9434-4f1a-b401-75c4f44df378 req-e675310e-f1f9-445e-bf4f-f49085e6cc3e service nova] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.573537] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7789600d-815f-4d2c-bdfa-0df85a5001b1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.589781] env[61898]: DEBUG nova.compute.provider_tree [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree 
for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.666275] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603168} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.666792] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 06c894a2-9236-4534-922f-4255c6cf0531/06c894a2-9236-4534-922f-4255c6cf0531.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.666792] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.667010] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5488a9e-24bd-481e-829d-4b702f72310a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.673721] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 975.673721] env[61898]: value = "task-1241101" [ 975.673721] env[61898]: _type = "Task" [ 975.673721] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.681145] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241101, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.694941] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c62039-e7ba-ca10-9cdc-75059a28aa0f, 'name': SearchDatastore_Task, 'duration_secs': 0.054123} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.695290] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.695560] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 975.695829] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df4fbfe2-f592-412a-84ac-e505210c8514 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.701728] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 975.701728] env[61898]: value = "task-1241102" [ 975.701728] env[61898]: _type = "Task" [ 975.701728] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.709449] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.872969] env[61898]: DEBUG oslo_vmware.api [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241099, 'name': ReconfigVM_Task, 'duration_secs': 1.143664} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.873523] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.873796] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfigured VM to attach interface {{(pid=61898) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 976.007957] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.047338] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Successfully updated port: 7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 976.065431] env[61898]: DEBUG oslo_concurrency.lockutils [req-6094a6b6-2ff8-471e-ab55-ee34a1e54bed req-849ddcc5-dc28-46f4-8378-dec3b5a05b32 service nova] Releasing lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.092122] env[61898]: DEBUG nova.scheduler.client.report [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 976.168692] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [{"id": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "address": "fa:16:3e:be:2b:9a", "network": {"id": "836f808c-afe9-455a-8f4d-35be9914ff4b", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1769785219-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11539a8a92af4208a15e69afe3dc60e8", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53a6375d-a9", "ovs_interfaceid": "53a6375d-a9c3-4c2e-8568-942c3c43bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.187471] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241101, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138639} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.187805] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 976.188798] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3fa9b0-c2b6-43b1-b70d-82e589bd7fb7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.213746] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 06c894a2-9236-4534-922f-4255c6cf0531/06c894a2-9236-4534-922f-4255c6cf0531.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.217141] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef0bcd6e-f081-4c13-a952-9ca6faae8ad4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.238737] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241102, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.240111] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 976.240111] env[61898]: value = "task-1241103" [ 976.240111] env[61898]: _type = "Task" [ 976.240111] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.248015] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241103, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.326598] env[61898]: DEBUG nova.compute.manager [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Received event network-vif-plugged-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 976.326656] env[61898]: DEBUG oslo_concurrency.lockutils [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] Acquiring lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.327289] env[61898]: DEBUG oslo_concurrency.lockutils [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.327289] env[61898]: DEBUG oslo_concurrency.lockutils [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.327289] env[61898]: DEBUG nova.compute.manager [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] No waiting events found dispatching network-vif-plugged-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 976.327475] env[61898]: WARNING nova.compute.manager [req-e13c692e-94cf-475d-82a5-f9305b7537e1 req-b37078d0-4ea0-441a-93bc-29bb46a9af6d service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Received unexpected event network-vif-plugged-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 for instance with vm_state building and task_state spawning. 
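The dominant pattern in the entries above is oslo_vmware task handling: a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, ...) is created, then `wait_for_task` / `_poll_task` repeatedly checks it, logging "progress is N%" until it reports "completed successfully". The snippet below is only a minimal, self-contained sketch of that poll-until-done loop; `TaskInfo`, `get_task_info`, and the interval/timeout values are hypothetical stand-ins for illustration, not the real oslo.vmware API.

```python
# Minimal sketch of the poll-until-done pattern seen in the log
# ("Task: {'id': task-1241103, ...} progress is 14%" ... "completed successfully").
# TaskInfo and get_task_info are hypothetical stand-ins for the vSphere task
# object and the property lookup oslo.vmware performs; they are NOT the real API.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str        # 'running', 'success', or 'error'
    progress: int     # 0-100
    error: str = ""


def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info.state == "success":
            print(f"Task {task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")


if __name__ == "__main__":
    # Fake backend that finishes after three polls, mimicking the log's progression.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 66), TaskInfo("success", 100)])
    wait_for_task(lambda _tid: next(states), "task-1241103", interval=0.01)
```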
[ 976.378696] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7f1f5708-049d-4e79-a5b1-cf1904e0584d tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.736s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.551292] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.553688] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.553688] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.597496] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.831s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.600769] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.924s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.603623] env[61898]: INFO nova.compute.claims [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 976.623239] env[61898]: INFO nova.scheduler.client.report [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted allocations for instance cf428138-4d0d-43bf-a654-06a62a82c9a1 [ 976.671020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-45b8dc91-b577-4548-bf3a-32c7c936c616" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.671403] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updated the network info_cache for instance 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 976.671817] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.672037] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.672243] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.672454] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.672693] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.672910] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.673120] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 976.673288] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.712784] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241102, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.749908] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.770695] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.771060] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.084111] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 977.131792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-41aa76d2-c3dc-4372-afe0-a72dbad7eb62 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "cf428138-4d0d-43bf-a654-06a62a82c9a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.474s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.177019] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.213728] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241102, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.052883} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.214019] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.214242] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.214501] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d09ec53c-1a30-472c-9040-02c46e04a588 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.220749] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 977.220749] env[61898]: value = "task-1241104" [ 977.220749] env[61898]: _type = "Task" [ 977.220749] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.221604] env[61898]: DEBUG nova.network.neutron [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Updating instance_info_cache with network_info: [{"id": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "address": "fa:16:3e:1a:57:e4", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c600ee7-db", "ovs_interfaceid": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.230683] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241104, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.258733] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241103, 'name': ReconfigVM_Task, 'duration_secs': 0.866046} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.258733] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 06c894a2-9236-4534-922f-4255c6cf0531/06c894a2-9236-4534-922f-4255c6cf0531.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.258733] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-938c0710-81c9-4bff-a7df-5086142170d0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.269024] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 977.269024] env[61898]: value = "task-1241105" [ 977.269024] env[61898]: _type = "Task" [ 977.269024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.272766] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 977.280754] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241105, 'name': Rename_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.727235] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.727582] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Instance network_info: |[{"id": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "address": "fa:16:3e:1a:57:e4", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c600ee7-db", "ovs_interfaceid": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 977.727949] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:57:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60567ee6-01d0-4b16-9c7a-4a896827d6eb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.735473] env[61898]: DEBUG oslo.service.loopingcall [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 977.738132] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.741373] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea937b46-2f4e-44e5-b0ab-3800e76091de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.756128] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066027} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.758589] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.759931] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85974bac-4162-436f-b579-eb170743c0be {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.765383] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.765383] env[61898]: value = "task-1241106" [ 977.765383] env[61898]: _type = "Task" [ 977.765383] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.783014] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.793130] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70cb5746-3235-4059-b6ed-c33ad1bac735 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.815282] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241106, 'name': CreateVM_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.819086] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241105, 'name': Rename_Task, 'duration_secs': 0.26896} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.819427] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 977.819427] env[61898]: value = "task-1241107" [ 977.819427] env[61898]: _type = "Task" [ 977.819427] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.819676] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.820718] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.820956] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0860778a-8bef-4d64-a163-dad6ea61b850 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.832120] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 977.832120] env[61898]: value = "task-1241108" [ 977.832120] env[61898]: _type = "Task" [ 977.832120] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.835516] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.845118] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241108, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.853786] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc47e47-b9b9-4c38-8740-efb0fcefabd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.861890] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c65b29-4358-4274-857b-e1e074a8223b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.902316] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760530d6-4bc8-4386-aafb-089d5991905a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.910205] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61837e4-845e-440c-b8b9-27306a454868 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.928736] env[61898]: DEBUG nova.compute.provider_tree [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.028805] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.029041] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.284445] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241106, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.328636] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241107, 'name': ReconfigVM_Task, 'duration_secs': 0.505472} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.328948] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 978.329607] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9c7e6158-ff10-461b-81e1-09e7dfb613e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.335024] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 978.335024] env[61898]: value = "task-1241109" [ 978.335024] env[61898]: _type = "Task" [ 978.335024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.345341] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241109, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.348471] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241108, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.355501] env[61898]: DEBUG nova.compute.manager [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Received event network-changed-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 978.355720] env[61898]: DEBUG nova.compute.manager [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Refreshing instance network info cache due to event network-changed-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 978.355991] env[61898]: DEBUG oslo_concurrency.lockutils [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] Acquiring lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.356111] env[61898]: DEBUG oslo_concurrency.lockutils [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] Acquired lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.356527] env[61898]: DEBUG nova.network.neutron [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Refreshing network info cache for port 7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.432749] env[61898]: DEBUG nova.scheduler.client.report [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 978.531939] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.532145] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.533351] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c311ff5a-b08a-4453-b77f-60276000af31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.551669] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a335013-5072-4f87-8604-480e361706dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.578205] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfiguring VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 978.578545] env[61898]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aec53c3a-d048-41c8-bf03-c62ea5784a64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.597086] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 978.597086] env[61898]: value = "task-1241110" [ 978.597086] env[61898]: _type = "Task" [ 978.597086] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.604551] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.825506] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241106, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.852291] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241109, 'name': Rename_Task, 'duration_secs': 0.133388} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.852439] env[61898]: DEBUG oslo_vmware.api [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241108, 'name': PowerOnVM_Task, 'duration_secs': 0.708886} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.852681] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 978.852931] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 978.853157] env[61898]: INFO nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Took 10.53 seconds to spawn the instance on the hypervisor. 
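A second recurring pattern in these entries is oslo_concurrency's lock accounting: every acquisition of "compute_resources", an instance UUID, or a refresh_cache lock logs how long the caller waited, and the matching release logs how long the lock was held (e.g. "waited 5.924s" and "held 2.338s" above). The context manager below reproduces that waited/held bookkeeping with a plain `threading.Lock`; it is a simplified stand-in for illustration only, not the actual lockutils implementation, and the lock names and owner strings are taken from the log purely as examples.

```python
# Simplified stand-in for the 'acquired ... :: waited Xs' / 'released ... :: held Ys'
# accounting that oslo_concurrency.lockutils emits in the trace above.
# Uses a plain threading.Lock keyed by name; the real code also supports
# fair and external (file-based) locks, which are not modelled here.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_lock = threading.Lock()


def _get_lock(name):
    # One shared lock object per name, created lazily.
    with _registry_lock:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, owner):
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    # Usage mirroring the resource tracker's claim pattern in the log.
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # stand-in for claiming CPU/RAM/disk for an instance
```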
[ 978.853331] env[61898]: DEBUG nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 978.855899] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17f5c7b7-1a1a-4e7c-9ce9-65d3add603a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.855899] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da78dac-85ec-4c87-8cb3-a2f3c000c572 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.867834] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 978.867834] env[61898]: value = "task-1241111" [ 978.867834] env[61898]: _type = "Task" [ 978.867834] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.875804] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.939300] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.940412] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 978.943209] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.966s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.106794] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.124107] env[61898]: DEBUG nova.network.neutron [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Updated VIF entry in instance network info cache for port 7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.124488] env[61898]: DEBUG nova.network.neutron [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Updating instance_info_cache with network_info: [{"id": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "address": "fa:16:3e:1a:57:e4", "network": {"id": "b5e210b6-e50e-4f19-b9b7-faf720ceadc9", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1617620554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e8b71885c83418fb13e216f804ffeeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60567ee6-01d0-4b16-9c7a-4a896827d6eb", "external-id": "nsx-vlan-transportzone-28", "segmentation_id": 28, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c600ee7-db", "ovs_interfaceid": "7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.286510] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241106, 'name': CreateVM_Task, 'duration_secs': 1.494999} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.286686] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.287487] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.287675] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.288008] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 979.288281] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-604bb917-e975-4928-be53-36462582d106 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.292687] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 979.292687] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521b41f6-04a5-e749-b77a-ecfd1ef5ada6" [ 979.292687] env[61898]: _type = "Task" [ 979.292687] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.300133] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521b41f6-04a5-e749-b77a-ecfd1ef5ada6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.378961] env[61898]: INFO nova.compute.manager [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Took 19.68 seconds to build instance. [ 979.384515] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241111, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.448916] env[61898]: INFO nova.compute.claims [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.454825] env[61898]: DEBUG nova.compute.utils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.456748] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 979.457096] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 979.508221] env[61898]: DEBUG nova.policy [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a8787797d37411da61d78cc1f8c8597', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2db51d8ea4d545eb81116e448a6ef363', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.607794] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.627301] env[61898]: DEBUG oslo_concurrency.lockutils [req-af46bd15-6b59-4a3e-abff-f63ce1ee4b32 req-f20982f1-48ed-4b4c-937c-f043d517df67 service nova] Releasing lock "refresh_cache-7eb0d534-90c8-439d-a894-3f03151ac74b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.780642] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Successfully created port: 9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 979.804869] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521b41f6-04a5-e749-b77a-ecfd1ef5ada6, 'name': SearchDatastore_Task, 'duration_secs': 0.014259} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.805222] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.805650] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.805954] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.806121] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.806304] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.806613] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4266a0d1-dd43-458e-a337-9166a342dc9f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.821420] env[61898]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.821420] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.822120] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eca6764-4772-4965-816e-8e4a8c943462 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.827273] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 979.827273] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525377c9-b3c4-a9f2-642c-7837fa3a2bd5" [ 979.827273] env[61898]: _type = "Task" [ 979.827273] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.834928] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525377c9-b3c4-a9f2-642c-7837fa3a2bd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.876881] env[61898]: DEBUG oslo_vmware.api [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241111, 'name': PowerOnVM_Task, 'duration_secs': 0.524284} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.877184] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 979.877795] env[61898]: INFO nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Took 9.17 seconds to spawn the instance on the hypervisor. 
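The entries above repeat one pattern: a vCenter operation is started (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task), a task handle comes back, and the task is polled until it logs "completed successfully" with a duration. The following is a minimal standalone sketch of that poll-until-done loop under stated assumptions; FakeTask and wait_for_task here are illustrative stand-ins, not the actual oslo.vmware implementation.

import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle; real tasks live server-side."""

    def __init__(self, duration_secs=1.5):
        self._duration = duration_secs
        self._deadline = time.monotonic() + duration_secs

    def poll(self):
        # Report "running" with rising progress until the deadline, then "success".
        remaining = max(self._deadline - time.monotonic(), 0.0)
        if remaining == 0.0:
            return "success", 100
        return "running", int(100 * (1 - remaining / self._duration))


def wait_for_task(task, interval=0.5):
    """Poll until the task reports success, logging progress each round."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"progress is {progress}%.")
        if state == "success":
            return time.monotonic() - start
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)


if __name__ == "__main__":
    duration_secs = wait_for_task(FakeTask())
    print(f"completed successfully. duration_secs={duration_secs:.6f}")

The recorded durations in the log (e.g. 'duration_secs': 1.494999 for CreateVM_Task) correspond to the elapsed time such a loop would measure between task submission and the success state.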
[ 979.877795] env[61898]: DEBUG nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 979.878432] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0dfd83f-a95f-4212-a51e-69357235d008 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.887215] env[61898]: DEBUG oslo_concurrency.lockutils [None req-33d5be9b-7f6c-4840-b57c-a2e091048d70 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.202s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.955477] env[61898]: INFO nova.compute.resource_tracker [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating resource usage from migration 8c71a041-862c-4011-981e-e1a3c351dce3 [ 979.960268] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 980.110637] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.137524] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9695a9b0-1193-4b51-8d82-2efb4c1e0f08 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.144772] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc858c36-64df-47c4-8be7-a33d861e404d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.174314] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be32f913-fbd7-403f-b7ca-85329689c91d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.182067] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e7100a-a0a6-4102-8248-ceb5b238ea66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.196995] env[61898]: DEBUG nova.compute.provider_tree [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.338480] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525377c9-b3c4-a9f2-642c-7837fa3a2bd5, 'name': SearchDatastore_Task, 'duration_secs': 0.04623} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.339701] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39bf4004-3f2e-485d-ba02-7b523f9e714f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.344956] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 980.344956] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5294a7dd-7733-2534-1e97-0b69b517e3d1" [ 980.344956] env[61898]: _type = "Task" [ 980.344956] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.352350] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5294a7dd-7733-2534-1e97-0b69b517e3d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.396835] env[61898]: INFO nova.compute.manager [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Took 19.44 seconds to build instance. 
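Nearly every step in this trace is bracketed by lock bookkeeping of the form 'Acquiring lock ... by ...', 'acquired ... :: waited Ns', 'released ... :: held Ns'. The toy context manager below reproduces that logging around a plain threading.Lock so the waited/held numbers in the log are easy to interpret; it is a sketch of the pattern only, not the real oslo_concurrency.lockutils code.

import contextlib
import threading
import time

_locks = {}                       # lock name -> threading.Lock, created on first use
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    # Example use mirroring the resource-tracker claim seen in the log.
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)           # simulated critical section

In the log, a long "waited" value (such as the 6.835s wait on "compute_resources") means another request held the same named lock for that long before this one could enter its critical section.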
[ 980.609624] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.700377] env[61898]: DEBUG nova.scheduler.client.report [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 980.784878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "06c894a2-9236-4534-922f-4255c6cf0531" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.784878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.784998] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "06c894a2-9236-4534-922f-4255c6cf0531-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.785621] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.785621] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.789031] env[61898]: INFO 
nova.compute.manager [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Terminating instance [ 980.854828] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5294a7dd-7733-2534-1e97-0b69b517e3d1, 'name': SearchDatastore_Task, 'duration_secs': 0.036966} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.855157] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.855462] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7eb0d534-90c8-439d-a894-3f03151ac74b/7eb0d534-90c8-439d-a894-3f03151ac74b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.855954] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5405eb46-17bf-48cb-8e0e-606069d5f4e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.862888] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 980.862888] env[61898]: value = "task-1241112" [ 980.862888] env[61898]: _type = "Task" [ 980.862888] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.871557] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241112, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.901174] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e0ed7075-2293-473c-b6f6-d9b188dce048 tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.950s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.972026] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 980.983408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.983785] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.984056] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.984263] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.984468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.986771] env[61898]: INFO nova.compute.manager [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Terminating instance [ 980.999525] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 980.999668] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.000730] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 981.000940] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.000940] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.001190] env[61898]: DEBUG nova.virt.hardware [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.002463] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01d049a-f41d-46ef-b8c0-bcf0e00beec3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.012820] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8800de6-906a-4037-b7c0-89e296ab4b20 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.111580] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.206045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.263s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.206172] env[61898]: INFO nova.compute.manager [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Migrating [ 981.218046] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.835s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.219671] env[61898]: INFO nova.compute.claims [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.223701] env[61898]: DEBUG nova.compute.manager [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Received event network-vif-plugged-9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 981.223974] env[61898]: DEBUG oslo_concurrency.lockutils [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] Acquiring lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.224256] env[61898]: DEBUG oslo_concurrency.lockutils [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] Lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.224467] env[61898]: DEBUG oslo_concurrency.lockutils [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] Lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.224701] env[61898]: DEBUG nova.compute.manager [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] No waiting events found dispatching network-vif-plugged-9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 981.224946] env[61898]: WARNING nova.compute.manager [req-3cbb1367-9718-484e-b9bb-b0e310f68f63 req-620318d1-cf4e-42b5-b0bd-77c544399ed8 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Received unexpected event network-vif-plugged-9240c0e2-0156-43f9-bb96-d168e811a19c for instance with vm_state building and task_state spawning. [ 981.294895] env[61898]: DEBUG nova.compute.manager [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 981.295162] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.296127] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695438a6-d412-46bf-bf16-285770479fc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.304820] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.304820] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2628e0a-fe1e-46af-9d77-5c559deff295 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.312555] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 981.312555] env[61898]: value = "task-1241113" [ 981.312555] env[61898]: _type = "Task" [ 981.312555] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.320622] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241113, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.373130] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241112, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450906} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.373453] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7eb0d534-90c8-439d-a894-3f03151ac74b/7eb0d534-90c8-439d-a894-3f03151ac74b.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.373714] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.374148] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6e77279-5373-439f-855a-16d36d466888 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.380204] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 981.380204] env[61898]: value = "task-1241114" [ 981.380204] env[61898]: _type = "Task" [ 981.380204] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.387810] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241114, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.491420] env[61898]: DEBUG nova.compute.manager [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 981.491619] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.492536] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d492dc3e-2d8a-4c6d-a653-ddd9472d8010 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.500620] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.500868] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8b9470a-da74-441b-abcd-a02ee2457522 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.506674] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 981.506674] env[61898]: value = "task-1241115" [ 981.506674] env[61898]: _type = "Task" [ 981.506674] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.516507] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.610626] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.663883] env[61898]: DEBUG nova.compute.manager [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Stashing vm_state: active {{(pid=61898) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 981.733623] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.733705] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.734337] env[61898]: DEBUG nova.network.neutron [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.780746] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Successfully updated port: 9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 981.806116] env[61898]: DEBUG nova.compute.manager [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Received event network-changed-9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 981.806357] env[61898]: DEBUG nova.compute.manager [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Refreshing instance network info cache due to event network-changed-9240c0e2-0156-43f9-bb96-d168e811a19c. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 981.806589] env[61898]: DEBUG oslo_concurrency.lockutils [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] Acquiring lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.806736] env[61898]: DEBUG oslo_concurrency.lockutils [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] Acquired lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.806900] env[61898]: DEBUG nova.network.neutron [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Refreshing network info cache for port 9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 981.822669] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241113, 'name': PowerOffVM_Task, 'duration_secs': 0.174669} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.823193] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.823193] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 981.823351] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75cd268a-fc2e-4c64-97c0-917c8633b7ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.890841] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241114, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06383} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.892552] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 981.892552] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c653f1b-3a50-4e7d-9ae4-718937d5864f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.914391] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] 7eb0d534-90c8-439d-a894-3f03151ac74b/7eb0d534-90c8-439d-a894-3f03151ac74b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 981.915762] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-215a3957-a0dc-4d39-a8eb-923467bd6a80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.930167] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 981.930416] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 981.930600] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Deleting the datastore file [datastore2] 06c894a2-9236-4534-922f-4255c6cf0531 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 981.930864] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2252afd-577b-4252-838e-82389ae28a34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.937641] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 981.937641] env[61898]: value = "task-1241117" [ 981.937641] env[61898]: _type = "Task" [ 981.937641] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.939249] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for the task: (returnval){ [ 981.939249] env[61898]: value = "task-1241118" [ 981.939249] env[61898]: _type = "Task" [ 981.939249] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.951234] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.954605] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241117, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.020087] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241115, 'name': PowerOffVM_Task, 'duration_secs': 0.143553} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.020415] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.020606] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.020909] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-64e6adfd-cb11-4ae2-b57f-16dd6909df79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.083421] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.083706] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.083944] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleting the datastore file [datastore1] 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.084277] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e71b3ea3-cc61-4f40-86a5-c6da1a583b61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.090969] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for the task: (returnval){ [ 982.090969] env[61898]: value = "task-1241120" [ 982.090969] env[61898]: _type = "Task" [ 982.090969] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.098453] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241120, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.108525] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.186932] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.283307] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.342478] env[61898]: DEBUG nova.network.neutron [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.438574] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16abf72a-d6e1-4521-89a2-a5c7a947b65e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.453405] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b768f3-73f0-48bb-a0d9-8c52c95a8a54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.456675] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241117, 'name': ReconfigVM_Task, 'duration_secs': 0.419094} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.457527] env[61898]: DEBUG nova.network.neutron [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.461957] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Reconfigured VM instance instance-00000063 to attach disk [datastore2] 7eb0d534-90c8-439d-a894-3f03151ac74b/7eb0d534-90c8-439d-a894-3f03151ac74b.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.461957] env[61898]: DEBUG oslo_vmware.api [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Task: {'id': task-1241118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178256} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.462943] env[61898]: DEBUG nova.network.neutron [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.464276] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-210cc53e-b5dc-40a2-8175-100b644e05ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.465784] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.465977] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.466206] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.466409] env[61898]: INFO nova.compute.manager [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 982.466660] env[61898]: DEBUG oslo.service.loopingcall [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.490262] env[61898]: DEBUG nova.compute.manager [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 982.490365] env[61898]: DEBUG nova.network.neutron [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.493857] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35d1566-188e-4137-9bed-b24769620e80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.499607] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 982.499607] env[61898]: value = "task-1241121" [ 982.499607] env[61898]: _type = "Task" [ 982.499607] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.506050] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59d79f4-d438-4064-969d-9426fae0c451 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.514260] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241121, 'name': Rename_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.522293] env[61898]: DEBUG nova.compute.provider_tree [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.600792] env[61898]: DEBUG oslo_vmware.api [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Task: {'id': task-1241120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16487} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.601185] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.601387] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.601567] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.601741] env[61898]: INFO nova.compute.manager [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Took 1.11 seconds to destroy the instance on the hypervisor. [ 982.601969] env[61898]: DEBUG oslo.service.loopingcall [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 982.605546] env[61898]: DEBUG nova.compute.manager [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 982.605660] env[61898]: DEBUG nova.network.neutron [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.613134] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.965422] env[61898]: DEBUG oslo_concurrency.lockutils [req-53b0135c-baee-45f6-9024-c51f35be4a7f req-e0ea3864-53e5-46da-bf66-aec13a063127 service nova] Releasing lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.965801] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquired lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.965962] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.967555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.011140] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241121, 'name': Rename_Task, 'duration_secs': 0.146915} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.011429] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 983.011679] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a6d2bfd-88d6-4d10-884c-c1873c24a58d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.018094] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 983.018094] env[61898]: value = "task-1241122" [ 983.018094] env[61898]: _type = "Task" [ 983.018094] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.026337] env[61898]: DEBUG nova.scheduler.client.report [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 983.029472] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241122, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.073907] env[61898]: DEBUG nova.compute.manager [req-47ffb1ee-aa87-47b3-a465-03e603cf0dc4 req-49d5cb08-a5cd-4ddb-8730-e25027109d4c service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Received event network-vif-deleted-53a6375d-a9c3-4c2e-8568-942c3c43bf4a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 983.074132] env[61898]: INFO nova.compute.manager [req-47ffb1ee-aa87-47b3-a465-03e603cf0dc4 req-49d5cb08-a5cd-4ddb-8730-e25027109d4c service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Neutron deleted interface 53a6375d-a9c3-4c2e-8568-942c3c43bf4a; detaching it from the instance and deleting it from the info cache [ 983.074314] env[61898]: DEBUG nova.network.neutron [req-47ffb1ee-aa87-47b3-a465-03e603cf0dc4 req-49d5cb08-a5cd-4ddb-8730-e25027109d4c service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.112071] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.245056] env[61898]: DEBUG nova.network.neutron [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.478408] env[61898]: DEBUG nova.network.neutron [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.501255] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.528785] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241122, 'name': PowerOnVM_Task} progress is 78%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.531612] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.314s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.532134] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 983.535084] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.527s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.535343] env[61898]: DEBUG nova.objects.instance [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'resources' on Instance uuid 070bc0cc-ff77-48b8-bd08-f17fe69e25af {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.577073] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16b0e537-0d11-4f28-aedb-f3ded2bc8d26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.586080] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b06448-d966-43eb-a128-7dca4e5e8ba9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.610654] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.619895] env[61898]: DEBUG nova.compute.manager [req-47ffb1ee-aa87-47b3-a465-03e603cf0dc4 req-49d5cb08-a5cd-4ddb-8730-e25027109d4c service nova] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Detach interface failed, port_id=53a6375d-a9c3-4c2e-8568-942c3c43bf4a, reason: Instance 45b8dc91-b577-4548-bf3a-32c7c936c616 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 983.676738] env[61898]: DEBUG nova.network.neutron [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Updating instance_info_cache with network_info: [{"id": "9240c0e2-0156-43f9-bb96-d168e811a19c", "address": "fa:16:3e:21:e0:9e", "network": {"id": "4732dadf-a67c-4255-a4f5-8328950ad5c5", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1595045161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db51d8ea4d545eb81116e448a6ef363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9240c0e2-01", "ovs_interfaceid": "9240c0e2-0156-43f9-bb96-d168e811a19c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.746976] env[61898]: INFO nova.compute.manager [-] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Took 1.26 seconds to deallocate network for instance. [ 983.830551] env[61898]: DEBUG nova.compute.manager [req-2a00b22a-d5ea-4bca-9010-bef27e45abd5 req-55a9784a-1e7f-4484-b960-c341fed0c5de service nova] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Received event network-vif-deleted-db84f935-3ee0-4a20-b18a-f05801372bd9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 983.981185] env[61898]: INFO nova.compute.manager [-] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Took 1.37 seconds to deallocate network for instance. [ 984.029411] env[61898]: DEBUG oslo_vmware.api [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241122, 'name': PowerOnVM_Task, 'duration_secs': 0.720711} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.029680] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.029885] env[61898]: INFO nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Took 9.10 seconds to spawn the instance on the hypervisor. 
[ 984.030082] env[61898]: DEBUG nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 984.030854] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991070ee-bfd1-4dfb-bc5a-05b9844ce9fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.040191] env[61898]: DEBUG nova.compute.utils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 984.045303] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 984.045543] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 984.113461] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.159035] env[61898]: DEBUG nova.policy [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce8ddf4b7fe4e0583f09e7f88ab5e70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '975e564bd7f442629018b97007460e00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 984.181864] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Releasing lock "refresh_cache-587c9997-3b6d-4654-9cf3-f181833c0728" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.182219] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance network_info: |[{"id": "9240c0e2-0156-43f9-bb96-d168e811a19c", "address": "fa:16:3e:21:e0:9e", "network": {"id": "4732dadf-a67c-4255-a4f5-8328950ad5c5", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1595045161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2db51d8ea4d545eb81116e448a6ef363", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9240c0e2-01", "ovs_interfaceid": "9240c0e2-0156-43f9-bb96-d168e811a19c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 984.185442] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:e0:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9240c0e2-0156-43f9-bb96-d168e811a19c', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 984.195528] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Creating folder: Project (2db51d8ea4d545eb81116e448a6ef363). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.196810] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59014277-1ae4-4afd-aca0-a7d9b0253529 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.206599] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Created folder: Project (2db51d8ea4d545eb81116e448a6ef363) in parent group-v267550. [ 984.206752] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Creating folder: Instances. Parent ref: group-v267707. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 984.206999] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66cc61f2-719e-4a61-85d1-702da325b06e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.218948] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Created folder: Instances in parent group-v267707. [ 984.219249] env[61898]: DEBUG oslo.service.loopingcall [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 984.219462] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 984.219702] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f05e1eef-eb3e-409b-82a2-bcc590338eec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.243257] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 984.243257] env[61898]: value = "task-1241125" [ 984.243257] env[61898]: _type = "Task" [ 984.243257] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.251388] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241125, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.255579] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.287201] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8217c0-e43b-44e0-89a0-72c3bce0f42e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.294905] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b4acfd-0879-4532-a471-f7b59f56e940 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.328365] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18f76ed-ffa8-4c8e-8ebe-4adabbdfdb2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.336275] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9308d999-1d37-436c-ba69-ae066db66017 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.350956] env[61898]: DEBUG nova.compute.provider_tree [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.483234] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf25c44-aa23-4db1-9d20-cb38c564672c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.487185] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.504135] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 0 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 984.552136] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 984.558961] env[61898]: INFO nova.compute.manager [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Took 18.05 seconds to build instance. [ 984.577998] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Successfully created port: 630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 984.612036] env[61898]: DEBUG oslo_vmware.api [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241110, 'name': ReconfigVM_Task, 'duration_secs': 5.794411} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.612309] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.612553] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Reconfigured VM to detach interface {{(pid=61898) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 984.757020] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241125, 'name': CreateVM_Task, 'duration_secs': 0.322317} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.757020] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.757020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.757020] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.757305] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.757507] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae97fa8-04b0-425d-b2da-9fb47f34db94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.763089] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 984.763089] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525838bf-b514-71a6-ec28-8bb718f8a04d" [ 984.763089] env[61898]: _type = "Task" [ 984.763089] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.773312] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525838bf-b514-71a6-ec28-8bb718f8a04d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.854578] env[61898]: DEBUG nova.scheduler.client.report [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 985.010438] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.010726] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-364b6ee1-6a17-4eb9-9c20-e9a15448b2e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.019447] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 985.019447] env[61898]: value = "task-1241126" [ 985.019447] env[61898]: _type = "Task" [ 985.019447] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.028170] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.061191] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a0eab0fb-f8a1-465d-ab38-67d18fce9cae tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.564s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.273839] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525838bf-b514-71a6-ec28-8bb718f8a04d, 'name': SearchDatastore_Task, 'duration_secs': 0.019471} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.274173] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.274420] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.274659] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.274811] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.274993] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.275274] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e05d6d09-83e0-4799-a39e-d8b5488481f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.283108] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.283311] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.284082] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7062fac-bdd3-4d1e-b9bb-ed346b71ed87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.289936] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 985.289936] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b56744-e4fa-8d19-790c-d2d8c9fb423a" [ 985.289936] env[61898]: _type = "Task" [ 985.289936] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.297327] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b56744-e4fa-8d19-790c-d2d8c9fb423a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.360521] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.362997] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.186s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.363345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.363615] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 985.363938] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.543s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.365456] env[61898]: INFO nova.compute.claims [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
985.369084] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944b80bd-b174-4465-ba37-db3b22edf410 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.376921] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af73453-0b7b-40ed-a9a7-1c1a44d5ea93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.382635] env[61898]: INFO nova.scheduler.client.report [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted allocations for instance 070bc0cc-ff77-48b8-bd08-f17fe69e25af [ 985.397646] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f615a0-ea74-4e11-aa47-c106c43373ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.407946] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62a92b4-9361-4b3e-a34c-5818eee701bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.444442] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180019MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 985.444602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.497818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.498169] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.498452] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.498658] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.498847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.501068] env[61898]: INFO nova.compute.manager [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Terminating instance [ 985.529740] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241126, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.564387] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 985.586832] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 985.587589] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 985.587589] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 985.587589] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 985.587589] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 985.587800] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 985.588188] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 985.588362] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 985.588478] env[61898]: DEBUG 
nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 985.588673] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 985.589460] env[61898]: DEBUG nova.virt.hardware [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 985.589851] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4982357f-ecb4-47bc-9ce4-3a42d000b23e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.598546] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881c1d8d-eed4-46b7-b03b-c9e08180349a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.802616] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b56744-e4fa-8d19-790c-d2d8c9fb423a, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.802616] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7dfa23b-42ab-43fc-81ad-2a7fa5216c79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.807529] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 985.807529] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]528548f6-e4d6-c219-e910-6c6760f91b61" [ 985.807529] env[61898]: _type = "Task" [ 985.807529] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.817561] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528548f6-e4d6-c219-e910-6c6760f91b61, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.905556] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5142ec3d-bc73-4913-83b1-e84e45cde1d0 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "070bc0cc-ff77-48b8-bd08-f17fe69e25af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.536s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.947378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.947585] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquired lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.947771] env[61898]: DEBUG nova.network.neutron [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.008146] env[61898]: DEBUG nova.compute.manager [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 986.008431] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 986.009765] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627f5c2c-98c9-4f9e-a63d-c393c7331b13 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.017642] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.017907] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e08892-2cf3-43c1-a0ad-3b665f9c4995 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.024925] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 986.024925] env[61898]: value = "task-1241127" [ 986.024925] env[61898]: _type = "Task" [ 986.024925] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.030908] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241126, 'name': PowerOffVM_Task, 'duration_secs': 0.726536} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.031984] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.032200] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 17 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 986.040401] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241127, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.208189] env[61898]: DEBUG nova.compute.manager [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Received event network-vif-plugged-630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 986.208189] env[61898]: DEBUG oslo_concurrency.lockutils [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.208189] env[61898]: DEBUG oslo_concurrency.lockutils [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.208189] env[61898]: DEBUG oslo_concurrency.lockutils [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.208712] env[61898]: DEBUG nova.compute.manager [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] No waiting events found dispatching network-vif-plugged-630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 986.209031] env[61898]: WARNING nova.compute.manager [req-8a77fd74-3773-4485-8b4f-5f5572bb9ddb req-588a7f5a-6556-4839-b96b-e395e7011b1a service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Received unexpected event network-vif-plugged-630c2b2d-b17e-470f-ad5f-506c4734d40c for instance with vm_state building and task_state spawning. [ 986.210514] env[61898]: DEBUG nova.compute.manager [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 986.211806] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14c5074-2931-484a-a7b7-ff1ea115e1b2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.324647] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]528548f6-e4d6-c219-e910-6c6760f91b61, 'name': SearchDatastore_Task, 'duration_secs': 0.010236} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.325306] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.325674] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 587c9997-3b6d-4654-9cf3-f181833c0728/587c9997-3b6d-4654-9cf3-f181833c0728.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.326079] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6afde832-1631-464b-a2bb-447630f9a8a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.333142] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 986.333142] env[61898]: value = "task-1241128" [ 986.333142] env[61898]: _type = "Task" [ 986.333142] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.341699] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.456647] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Successfully updated port: 630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 986.537798] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241127, 'name': PowerOffVM_Task, 'duration_secs': 0.152618} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.540944] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.541152] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 986.543855] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 986.544128] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 986.544300] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 986.544493] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 986.544645] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 986.544812] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 986.544999] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 986.545178] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 986.545350] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 986.545521] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 986.545699] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 986.551463] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22e0c052-5c7b-434b-b047-813d52972424 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.556296] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f901e8a7-df1a-4343-b990-0dd80240850f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.574067] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 986.574067] env[61898]: value = "task-1241130" [ 986.574067] env[61898]: _type = "Task" [ 986.574067] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.586693] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241130, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.644703] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d84ba7-2df2-446b-a9e3-a92e92e1668d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.655037] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca17ee2-6591-45d0-8017-6347fcb190d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.701060] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de55f6ee-b1cf-4117-a066-2e3694b7c8c0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.704198] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 986.704481] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 986.704694] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleting the datastore file [datastore2] 4522f4ef-c8f6-4fe1-acd5-796f87f22839 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 986.705380] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d1832d1-1c62-442f-9baa-2e5ca395b612 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.717103] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0615b4d-f085-4b7d-8716-3623b0a6a4ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.721228] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 986.721228] env[61898]: value = "task-1241131" [ 986.721228] env[61898]: _type = "Task" [ 986.721228] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.722365] env[61898]: INFO nova.compute.manager [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] instance snapshotting [ 986.736091] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3094c41-7e36-4d0e-8007-55a281089c76 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.739890] env[61898]: DEBUG nova.compute.provider_tree [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.748024] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.762425] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0975bf12-64e8-41ee-bf3e-9141aa9fd901 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.783193] env[61898]: INFO nova.network.neutron [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Port 1b2175da-a7e5-4786-a4f6-780fb83f447c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
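The records above repeatedly show the same oslo.vmware pattern: a vCenter task is started (PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ...), the caller logs "Waiting for the task: (returnval){ value = task-NNNN ... } to complete", and _poll_task then reports "progress is N%" until "completed successfully". The following is a minimal, hedged sketch of that pattern using the public oslo.vmware session API; the host, credentials, poll interval and the 'vm-12345' managed object reference are illustrative placeholders, not values taken from this log, and this is not the exact nova.virt.vmwareapi code path.

# Sketch only: how the "Waiting for the task / progress is N% / completed
# successfully" lines arise from oslo.vmware's task polling.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Assumed connection details; replace with a real vCenter endpoint.
session = vmware_api.VMwareAPISession(
    'vc.example.test',            # hypothetical vCenter host
    'user', 'secret',             # hypothetical credentials
    api_retry_count=10,
    task_poll_interval=0.5)       # interval of the loop that logs "progress is N%"

# invoke_api() starts the vCenter task and returns a reference to it.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# wait_for_task() blocks, polling the task until it finishes; it raises if the
# task ends in error and otherwise returns the task info whose elapsed time is
# what the log reports as 'duration_secs'.
task_info = session.wait_for_task(task_ref)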
[ 986.783602] env[61898]: DEBUG nova.network.neutron [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [{"id": "4bed7107-cc7d-431f-a835-84a51f188455", "address": "fa:16:3e:44:b0:e6", "network": {"id": "89882853-88ec-48f1-a883-3be9e65f9fd8", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-183348816-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e2c65efa327e403284ad2e78b3c7b7d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bed7107-cc", "ovs_interfaceid": "4bed7107-cc7d-431f-a835-84a51f188455", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.843856] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241128, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.959277] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 986.963025] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.963025] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.086438] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241130, 'name': ReconfigVM_Task, 'duration_secs': 0.49965} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.086890] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 33 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 987.230942] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.245324] env[61898]: DEBUG nova.scheduler.client.report [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 987.276046] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 987.276532] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4087598b-a5f1-422e-95f1-b5ec38e988f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.286231] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Releasing lock "refresh_cache-4522f4ef-c8f6-4fe1-acd5-796f87f22839" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.288309] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 987.288309] env[61898]: value = "task-1241132" [ 987.288309] env[61898]: _type = "Task" [ 987.288309] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.296284] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241132, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.344403] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241128, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512695} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.344875] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 587c9997-3b6d-4654-9cf3-f181833c0728/587c9997-3b6d-4654-9cf3-f181833c0728.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 987.344875] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 987.345137] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-302ea129-f1af-4e51-bfce-79ae7fc2a46a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.351665] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 987.351665] env[61898]: value = "task-1241133" [ 987.351665] env[61898]: _type = "Task" [ 987.351665] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.360070] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.490925] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.594362] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 987.594686] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 987.594861] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.595064] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 987.595218] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 987.595409] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 987.595655] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 987.595818] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 987.595979] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 
possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 987.596160] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 987.596511] env[61898]: DEBUG nova.virt.hardware [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 987.602146] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 987.602454] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57c2c0f0-3600-4647-8e29-e5e065f28493 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.621707] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 987.621707] env[61898]: value = "task-1241134" [ 987.621707] env[61898]: _type = "Task" [ 987.621707] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.629969] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241134, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.649353] env[61898]: DEBUG nova.network.neutron [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.731611] env[61898]: DEBUG oslo_vmware.api [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.533132} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.731878] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 987.732080] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 987.732369] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 987.732479] env[61898]: INFO nova.compute.manager [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Took 1.72 seconds to destroy the instance on the hypervisor. 
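The instance teardown traced between the "Powering off the VM" and "Took 1.72 seconds to destroy the instance" records follows three vSphere calls: PowerOffVM_Task, UnregisterVM, then DeleteDatastoreFile_Task on the instance directory. Below is a hedged sketch of that sequence driven through an oslo.vmware session like the one sketched earlier; the destroy_vm wrapper name, the object references and the datastore path are hypothetical, and this is the underlying call order rather than the actual nova.virt.vmwareapi.vmops implementation.

# Sketch only: the power-off / unregister / delete-files sequence seen above.
def destroy_vm(session, vm_ref, datacenter_ref, instance_dir_path):
    # 1. Power off the VM and wait for the task (the 'PowerOffVM_Task' entries).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # 2. UnregisterVM is a plain method, not a task, so there is nothing to poll
    #    (hence "Unregistering the VM" is followed directly by "Unregistered the VM").
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # 3. Delete the instance's files from the datastore; this corresponds to the
    #    'DeleteDatastoreFile_Task' on a path such as "[datastore2] <instance-uuid>".
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=instance_dir_path,      # e.g. '[datastore2] <uuid>'
                              datacenter=datacenter_ref)
    session.wait_for_task(task)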
[ 987.732668] env[61898]: DEBUG oslo.service.loopingcall [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 987.732864] env[61898]: DEBUG nova.compute.manager [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 987.732958] env[61898]: DEBUG nova.network.neutron [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 987.750878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.387s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.751462] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 987.754102] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.567s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.789663] env[61898]: DEBUG oslo_concurrency.lockutils [None req-f8a5cbf0-5bfa-4995-87e8-95d386a94413 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "interface-4522f4ef-c8f6-4fe1-acd5-796f87f22839-1b2175da-a7e5-4786-a4f6-780fb83f447c" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.761s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.799635] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241132, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.861506] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070726} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.861787] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 987.862599] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4dedf8c-d2e8-4d4e-b961-159de11e5ef8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.885791] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 587c9997-3b6d-4654-9cf3-f181833c0728/587c9997-3b6d-4654-9cf3-f181833c0728.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.886104] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df41a3d8-adf8-4a34-b53e-7970f6567198 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.905861] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 987.905861] env[61898]: value = "task-1241135" [ 987.905861] env[61898]: _type = "Task" [ 987.905861] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.913743] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241135, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.136596] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241134, 'name': ReconfigVM_Task, 'duration_secs': 0.177358} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.137158] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 988.138035] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a281468b-cac4-4f8d-bade-d73635c573b7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.155271] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.155625] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Instance network_info: |[{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 988.164297] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.164844] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:f7:46:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '630c2b2d-b17e-470f-ad5f-506c4734d40c', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.172599] env[61898]: DEBUG oslo.service.loopingcall [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 988.172834] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4f4e132-af04-4acf-bb47-a045bdc9bc26 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.186787] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.187153] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0433ddab-d5a0-4751-bf7b-d6506a8d2385 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.206753] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.206753] env[61898]: value = "task-1241137" [ 988.206753] env[61898]: _type = "Task" [ 988.206753] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.208283] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 988.208283] env[61898]: value = "task-1241136" [ 988.208283] env[61898]: _type = "Task" [ 988.208283] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.221190] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241137, 'name': CreateVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.224913] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241136, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.238023] env[61898]: DEBUG nova.compute.manager [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Received event network-changed-630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 988.238284] env[61898]: DEBUG nova.compute.manager [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Refreshing instance network info cache due to event network-changed-630c2b2d-b17e-470f-ad5f-506c4734d40c. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 988.238562] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.238711] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.238911] env[61898]: DEBUG nova.network.neutron [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Refreshing network info cache for port 630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.257264] env[61898]: DEBUG nova.compute.utils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 988.262431] env[61898]: INFO nova.compute.claims [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.267069] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 988.267364] env[61898]: DEBUG nova.network.neutron [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 988.303153] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241132, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.333521] env[61898]: DEBUG nova.policy [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 988.372331] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.372669] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.416741] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241135, 'name': ReconfigVM_Task, 'duration_secs': 0.295015} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.417481] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 587c9997-3b6d-4654-9cf3-f181833c0728/587c9997-3b6d-4654-9cf3-f181833c0728.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.418874] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3bbe0c8-8026-4efb-909d-b71b99584c30 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.426318] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 988.426318] env[61898]: value = "task-1241138" [ 988.426318] env[61898]: _type = "Task" [ 988.426318] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.433212] env[61898]: DEBUG nova.compute.manager [req-45c5c461-303c-497b-9bbf-5563c31b5c7b req-71920981-7837-47e4-b3c6-1935f257303e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Received event network-vif-deleted-4bed7107-cc7d-431f-a835-84a51f188455 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 988.433483] env[61898]: INFO nova.compute.manager [req-45c5c461-303c-497b-9bbf-5563c31b5c7b req-71920981-7837-47e4-b3c6-1935f257303e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Neutron deleted interface 4bed7107-cc7d-431f-a835-84a51f188455; detaching it from the instance and deleting it from the info cache [ 988.433699] env[61898]: DEBUG nova.network.neutron [req-45c5c461-303c-497b-9bbf-5563c31b5c7b req-71920981-7837-47e4-b3c6-1935f257303e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.441435] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241138, 'name': Rename_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.630117] env[61898]: DEBUG nova.network.neutron [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Successfully created port: dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.719690] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241137, 'name': CreateVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.725154] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241136, 'name': ReconfigVM_Task, 'duration_secs': 0.455096} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.725577] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Reconfigured VM instance instance-00000060 to attach disk [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b/c26c4add-728c-45ea-8465-7c4273b7d97b.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.726052] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 50 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 988.767238] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 988.776467] env[61898]: INFO nova.compute.resource_tracker [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating resource usage from migration eead421f-5e2c-42f2-b3f8-23cd94168a40 [ 988.806023] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241132, 'name': CreateSnapshot_Task, 'duration_secs': 1.214866} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.806023] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 988.806023] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd89a28-89c3-41be-9a91-00be3508321a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.875342] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 988.911858] env[61898]: DEBUG nova.network.neutron [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.939674] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241138, 'name': Rename_Task, 'duration_secs': 0.157846} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.939916] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-663b33e3-2c0c-485f-aca4-8fa12276b42d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.941828] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.942084] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9da3fd79-3433-4975-978a-ac2d34b1e845 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.957621] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8650718-4034-4f98-8c5b-88d9d6632081 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.968419] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 988.968419] env[61898]: value = "task-1241139" [ 988.968419] env[61898]: _type = "Task" [ 988.968419] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.980212] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241139, 'name': PowerOnVM_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.987874] env[61898]: DEBUG nova.compute.manager [req-45c5c461-303c-497b-9bbf-5563c31b5c7b req-71920981-7837-47e4-b3c6-1935f257303e service nova] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Detach interface failed, port_id=4bed7107-cc7d-431f-a835-84a51f188455, reason: Instance 4522f4ef-c8f6-4fe1-acd5-796f87f22839 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 988.989058] env[61898]: DEBUG nova.network.neutron [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updated VIF entry in instance network info cache for port 630c2b2d-b17e-470f-ad5f-506c4734d40c. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 988.989396] env[61898]: DEBUG nova.network.neutron [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.036318] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d33293c-4527-4e0d-80bd-d59068c07d80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.044668] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8ea708-e921-49c1-919f-b61809dec280 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.077570] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34914307-ed02-4f20-9385-4b63a2c4e94d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.085592] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c66dbf0-e0a0-47a5-b923-922be86a99a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.099508] env[61898]: DEBUG nova.compute.provider_tree [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.220502] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241137, 'name': CreateVM_Task, 'duration_secs': 0.519231} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.220785] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 989.221852] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.221852] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.221931] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 989.222162] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ecd5f90-f48d-4293-bc1b-2691e376eaeb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.226991] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 989.226991] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d34362-7123-0699-ed1f-7c164a7bb095" [ 989.226991] env[61898]: _type = "Task" [ 989.226991] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.233611] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6ffd84-4299-4d4c-bf2c-4dcaa5d4682f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.241900] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d34362-7123-0699-ed1f-7c164a7bb095, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.256962] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f75ef7-2745-4d16-90a8-13fbbf3a494c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.278188] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 67 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 989.327477] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 989.327618] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1d7165ec-0daf-4b34-949a-693be2df694d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.337171] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 989.337171] env[61898]: value = "task-1241140" [ 989.337171] env[61898]: _type = "Task" [ 989.337171] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.345386] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241140, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.397928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.418088] env[61898]: INFO nova.compute.manager [-] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Took 1.68 seconds to deallocate network for instance. [ 989.481721] env[61898]: DEBUG oslo_vmware.api [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241139, 'name': PowerOnVM_Task, 'duration_secs': 0.512302} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.481721] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 989.483191] env[61898]: INFO nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Took 8.51 seconds to spawn the instance on the hypervisor. [ 989.483191] env[61898]: DEBUG nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 989.483191] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d660c8cc-e67b-4879-9af1-1e5c34235293 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.493972] env[61898]: DEBUG oslo_concurrency.lockutils [req-4d42e9de-982d-4933-bfe3-0ab25479e91e req-1a22347c-6beb-4b8f-8904-8407b9bafc9d service nova] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.602882] env[61898]: DEBUG nova.scheduler.client.report [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 989.737733] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d34362-7123-0699-ed1f-7c164a7bb095, 'name': SearchDatastore_Task, 'duration_secs': 0.018615} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.738119] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 989.738433] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.738717] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.738900] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.739112] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.739373] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e35b6862-592e-4e94-b9af-23c78bbb9461 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.747098] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.747279] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.747947] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c3279fb-234d-4be7-b5ad-a8f5556de38a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.753040] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 989.753040] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]520ddecd-4598-9f57-f0c3-948de6618a16" [ 989.753040] env[61898]: _type = "Task" [ 989.753040] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.760392] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520ddecd-4598-9f57-f0c3-948de6618a16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.789740] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 989.821761] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.822588] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.822588] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.823013] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor 
pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.823243] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.823418] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.823758] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.824047] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.824340] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.824627] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.824917] env[61898]: DEBUG nova.virt.hardware [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.826339] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f194fd-0bf4-441f-a61c-939e47fc4f3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.830863] env[61898]: DEBUG nova.network.neutron [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Port fa47b33a-e279-408b-bcd7-9165ff102179 binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 989.838349] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d433de9a-84d2-4f2e-aa88-2ca39c276f75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.851142] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 
tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241140, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.926203] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.000460] env[61898]: INFO nova.compute.manager [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Took 19.34 seconds to build instance. [ 990.109129] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.355s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.109450] env[61898]: INFO nova.compute.manager [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Migrating [ 990.118100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.863s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.118478] env[61898]: DEBUG nova.objects.instance [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lazy-loading 'resources' on Instance uuid 06c894a2-9236-4534-922f-4255c6cf0531 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.261030] env[61898]: DEBUG nova.network.neutron [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Successfully updated port: dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.266133] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]520ddecd-4598-9f57-f0c3-948de6618a16, 'name': SearchDatastore_Task, 'duration_secs': 0.016506} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.266791] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94b57a0e-8315-4a75-a658-98f4bf89725c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.272213] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 990.272213] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524f77bc-4199-eb85-aaca-4700519152c2" [ 990.272213] env[61898]: _type = "Task" [ 990.272213] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.280628] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524f77bc-4199-eb85-aaca-4700519152c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.340495] env[61898]: DEBUG nova.compute.manager [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Received event network-vif-plugged-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 990.341088] env[61898]: DEBUG oslo_concurrency.lockutils [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] Acquiring lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.341339] env[61898]: DEBUG oslo_concurrency.lockutils [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.341524] env[61898]: DEBUG oslo_concurrency.lockutils [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.341698] env[61898]: DEBUG nova.compute.manager [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] No waiting events found dispatching network-vif-plugged-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 990.341869] env[61898]: WARNING nova.compute.manager [req-d5c5681b-db3a-4a5c-901f-f7ae1ccc3483 req-f3d42039-9451-4de3-a4aa-9a3ebc88f040 service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Received unexpected event 
network-vif-plugged-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 for instance with vm_state building and task_state spawning. [ 990.352857] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241140, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.502875] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d8b26514-776c-49ee-b585-e4e8970adea1 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.851s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.630380] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.630662] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.630941] env[61898]: DEBUG nova.network.neutron [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.766736] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.766925] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.767063] env[61898]: DEBUG nova.network.neutron [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.784251] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524f77bc-4199-eb85-aaca-4700519152c2, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.786714] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 990.786979] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 990.787605] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a477df7-4efa-4abe-929b-a34201708918 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.794554] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 990.794554] env[61898]: value = "task-1241141" [ 990.794554] env[61898]: _type = "Task" [ 990.794554] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.805149] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.833159] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c88a270-6708-4be7-8f5b-af68ca99de03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.842787] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1d4b13-d436-4ed3-9a36-4eb3fcc43426 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.891161] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.891396] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.891576] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.894679] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62280349-c06b-4a3e-a5e6-1c3b303a4cad {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.901032] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241140, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.907699] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f8111c-844d-470f-bb0d-4843b1658015 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.922312] env[61898]: DEBUG nova.compute.provider_tree [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.090438] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "587c9997-3b6d-4654-9cf3-f181833c0728" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.090892] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.091207] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.091462] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.091686] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.094564] env[61898]: INFO nova.compute.manager [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Terminating instance [ 991.303085] env[61898]: DEBUG nova.network.neutron [None 
req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.310459] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241141, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.367896] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241140, 'name': CloneVM_Task, 'duration_secs': 1.654611} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.368192] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Created linked-clone VM from snapshot [ 991.369082] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264103f8-aee7-4579-af6e-d06841d092cc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.377893] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Uploading image cbad67c6-1ae6-4cb9-b818-a91c0d9862a3 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 991.387471] env[61898]: DEBUG nova.network.neutron [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 991.400038] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 991.400038] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a019b9a4-5ec2-4847-980c-dedfabd3890a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.405403] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 991.405403] env[61898]: value = "task-1241142" [ 991.405403] env[61898]: _type = "Task" [ 991.405403] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.419050] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241142, 'name': Destroy_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.425869] env[61898]: DEBUG nova.scheduler.client.report [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 991.488493] env[61898]: DEBUG nova.network.neutron [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Updating instance_info_cache with network_info: [{"id": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "address": "fa:16:3e:50:c9:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc9bbcfc-c6", "ovs_interfaceid": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.598781] env[61898]: DEBUG nova.compute.manager [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 991.599069] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 991.600072] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e61cdc-d1f9-40f0-9494-c6e1f366f2fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.607961] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.608242] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d1bf5ce-03ac-4e73-accc-1ef6ca81cbe7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.613996] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 991.613996] env[61898]: value = "task-1241143" [ 991.613996] env[61898]: _type = "Task" [ 991.613996] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.621903] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.804737] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608393} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.805372] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 991.805672] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 991.805952] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a0ae07c-b73c-4b1a-81ff-27bf1af2ad47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.813508] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 991.813508] env[61898]: value = "task-1241144" [ 991.813508] env[61898]: _type = "Task" [ 991.813508] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.822862] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241144, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.889192] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.916248] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241142, 'name': Destroy_Task} progress is 100%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.923354] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.923530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.923708] env[61898]: DEBUG nova.network.neutron [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.932077] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.934346] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.447s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.934610] env[61898]: DEBUG nova.objects.instance [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lazy-loading 'resources' on Instance uuid 45b8dc91-b577-4548-bf3a-32c7c936c616 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.953909] env[61898]: INFO nova.scheduler.client.report [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Deleted allocations for instance 06c894a2-9236-4534-922f-4255c6cf0531 [ 991.991668] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.992341] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Instance network_info: |[{"id": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "address": "fa:16:3e:50:c9:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc9bbcfc-c6", "ovs_interfaceid": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 991.992731] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:c9:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.000853] env[61898]: DEBUG oslo.service.loopingcall [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.001727] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.001976] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-696ef170-e152-47ed-a5da-eb17e4bc9170 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.022362] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.022362] env[61898]: value = "task-1241145" [ 992.022362] env[61898]: _type = "Task" [ 992.022362] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.030193] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241145, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.124251] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241143, 'name': PowerOffVM_Task, 'duration_secs': 0.175585} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.124567] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 992.124762] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 992.125026] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78662b73-0bca-44d9-ac7a-f4f63112e06c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.190486] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 992.190695] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 992.190887] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Deleting the datastore file [datastore2] 587c9997-3b6d-4654-9cf3-f181833c0728 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.191187] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93636227-c415-4109-a414-2fab3eab7b8a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.197854] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for the task: (returnval){ [ 992.197854] env[61898]: value = "task-1241147" [ 992.197854] env[61898]: _type = "Task" [ 992.197854] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.205782] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241147, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.324728] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241144, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079144} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.325108] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 992.325962] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e1cee9-eef6-4084-82ce-f56aa0d2c3fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.353857] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.354260] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3b1c59f-5969-424b-8bdb-ead829e5066e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.372889] env[61898]: DEBUG nova.compute.manager [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Received event network-changed-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 992.373105] env[61898]: DEBUG nova.compute.manager [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Refreshing instance network info cache due to event network-changed-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 992.373374] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] Acquiring lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.373649] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] Acquired lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.373955] env[61898]: DEBUG nova.network.neutron [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Refreshing network info cache for port dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.380525] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 992.380525] env[61898]: value = "task-1241148" [ 992.380525] env[61898]: _type = "Task" [ 992.380525] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.391572] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241148, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.415250] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241142, 'name': Destroy_Task, 'duration_secs': 0.528307} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.415546] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Destroyed the VM [ 992.415781] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 992.416055] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a0aeb2d0-5ec6-4efb-901f-514f3d2e1239 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.422898] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 992.422898] env[61898]: value = "task-1241149" [ 992.422898] env[61898]: _type = "Task" [ 992.422898] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.433321] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241149, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.462896] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c6b905a4-0b3a-4f05-8583-fa6e705173a3 tempest-ServerMetadataNegativeTestJSON-1802685007 tempest-ServerMetadataNegativeTestJSON-1802685007-project-member] Lock "06c894a2-9236-4534-922f-4255c6cf0531" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 11.678s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.532133] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241145, 'name': CreateVM_Task, 'duration_secs': 0.374856} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.536272] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.537127] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.537304] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.537622] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 992.538142] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2794eee6-77ab-403c-b477-967267887f8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.542778] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 992.542778] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525ace3b-1400-c82f-b7f6-94a6f3eb53ce" [ 992.542778] env[61898]: _type = "Task" [ 992.542778] env[61898]: } to complete.
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.553614] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ace3b-1400-c82f-b7f6-94a6f3eb53ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.631430] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b65a771-a641-4bae-ae1a-91b159b7b974 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.643110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44263c1b-39fb-4a48-a3cf-fc30083dd4f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.644771] env[61898]: DEBUG nova.network.neutron [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.676986] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb791ca-b9e3-4c2b-8bb0-6dff29646b92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.687301] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ccc618-ab7f-472d-a4fe-75e0f743b013 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.704308] env[61898]: DEBUG nova.compute.provider_tree [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.714277] env[61898]: DEBUG oslo_vmware.api [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Task: {'id': task-1241147, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.252306} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.716018] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.716018] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 992.716018] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 992.716018] env[61898]: INFO nova.compute.manager [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Took 1.12 seconds to destroy the instance on the hypervisor. [ 992.716018] env[61898]: DEBUG oslo.service.loopingcall [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.716599] env[61898]: DEBUG nova.compute.manager [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 992.716599] env[61898]: DEBUG nova.network.neutron [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 992.890641] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241148, 'name': ReconfigVM_Task, 'duration_secs': 0.317462} completed successfully.
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.890944] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 992.891619] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e2bdc6f-92bd-4691-9490-837d626a61fd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.899149] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 992.899149] env[61898]: value = "task-1241150" [ 992.899149] env[61898]: _type = "Task" [ 992.899149] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.908999] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241150, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.933418] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241149, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.045411] env[61898]: DEBUG nova.compute.manager [req-8fa7bce3-42f3-4f35-ace6-448491c4e785 req-1a7fadda-2398-4cdc-b9bf-e22ef2f4d0aa service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Received event network-vif-deleted-9240c0e2-0156-43f9-bb96-d168e811a19c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 993.045626] env[61898]: INFO nova.compute.manager [req-8fa7bce3-42f3-4f35-ace6-448491c4e785 req-1a7fadda-2398-4cdc-b9bf-e22ef2f4d0aa service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Neutron deleted interface 9240c0e2-0156-43f9-bb96-d168e811a19c; detaching it from the instance and deleting it from the info cache [ 993.045888] env[61898]: DEBUG nova.network.neutron [req-8fa7bce3-42f3-4f35-ace6-448491c4e785 req-1a7fadda-2398-4cdc-b9bf-e22ef2f4d0aa service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.063272] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525ace3b-1400-c82f-b7f6-94a6f3eb53ce, 'name': SearchDatastore_Task, 'duration_secs': 0.011308} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.063272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.063272] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.063272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.063272] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.063272] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.063272] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcd3657f-9507-457f-ad0d-05ed7abd94e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.073476] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.073663] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.074394] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-668e5fbc-a893-4d1b-939a-22e63fd96e99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.081377] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 993.081377] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a89bf4-672e-bd17-797d-8a3843b468d8" [ 993.081377] env[61898]: _type = "Task" [ 993.081377] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.093661] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a89bf4-672e-bd17-797d-8a3843b468d8, 'name': SearchDatastore_Task, 'duration_secs': 0.008783} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.094636] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8125d03-bfe9-4387-95ff-51bff7822817 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.100042] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 993.100042] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ddb597-b5ac-8e61-07c2-bb97b13d1e25" [ 993.100042] env[61898]: _type = "Task" [ 993.100042] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.109584] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ddb597-b5ac-8e61-07c2-bb97b13d1e25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.148370] env[61898]: DEBUG oslo_concurrency.lockutils [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.209881] env[61898]: DEBUG nova.scheduler.client.report [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 993.382435] env[61898]: DEBUG nova.network.neutron [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Updated VIF entry in instance network info cache for port dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.382868] env[61898]: DEBUG nova.network.neutron [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Updating instance_info_cache with network_info: [{"id": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "address": "fa:16:3e:50:c9:b1", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc9bbcfc-c6", "ovs_interfaceid": "dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.409433] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae095266-b806-4c43-9ae9-5677a65dd82f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.418564] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241150, 'name': Rename_Task, 'duration_secs': 0.142817} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.437265] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 993.438528] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 0 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 993.443475] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1ba455c-9ba4-4724-bc1a-28979a3c7e78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.455052] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241149, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.456770] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 993.456770] env[61898]: value = "task-1241151" [ 993.456770] env[61898]: _type = "Task" [ 993.456770] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.465779] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.470210] env[61898]: DEBUG nova.network.neutron [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.555257] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b2375e3-eae9-4466-9bf1-18fbbf288117 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.565184] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3519c8-2aba-40e9-81de-a1e04d500f74 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.593898] env[61898]: DEBUG nova.compute.manager [req-8fa7bce3-42f3-4f35-ace6-448491c4e785 req-1a7fadda-2398-4cdc-b9bf-e22ef2f4d0aa service nova] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Detach interface failed, port_id=9240c0e2-0156-43f9-bb96-d168e811a19c, reason: Instance 587c9997-3b6d-4654-9cf3-f181833c0728 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 993.609407] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ddb597-b5ac-8e61-07c2-bb97b13d1e25, 'name': SearchDatastore_Task, 'duration_secs': 0.008286} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.609727] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.609992] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 2fe9d97d-57e0-4b08-968b-4bb97a610fbb/2fe9d97d-57e0-4b08-968b-4bb97a610fbb.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 993.610267] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92409a3c-c09a-414e-b100-a173a659d641 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.618453] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 993.618453] env[61898]: value = "task-1241152" [ 993.618453] env[61898]: _type = "Task" [ 993.618453] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.627025] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.677637] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995a6b54-456d-4383-bc04-44618f46a7e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.699586] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aff320-e3df-4562-b0dc-2515f34e14b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.707235] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 83 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 993.715098] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.781s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.717258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 8.273s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.741624] env[61898]: INFO nova.scheduler.client.report [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Deleted allocations for instance 45b8dc91-b577-4548-bf3a-32c7c936c616 [ 993.886806] env[61898]: DEBUG oslo_concurrency.lockutils [req-9fbe7463-0b95-434c-bffb-35fb660db5bf req-323b7d11-854c-4567-a95f-4ce3f0f9966a service nova] Releasing lock "refresh_cache-2fe9d97d-57e0-4b08-968b-4bb97a610fbb" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.946176] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.946615] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06372f74-874b-41c7-888c-ded6c44bd3ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.956261] env[61898]: DEBUG oslo_vmware.api [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 
tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241149, 'name': RemoveSnapshot_Task, 'duration_secs': 1.119206} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.958015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 993.960897] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 993.960897] env[61898]: value = "task-1241153" [ 993.960897] env[61898]: _type = "Task" [ 993.960897] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.980445] env[61898]: INFO nova.compute.manager [-] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Took 1.26 seconds to deallocate network for instance. [ 993.980911] env[61898]: DEBUG oslo_vmware.api [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241151, 'name': PowerOnVM_Task, 'duration_secs': 0.491045} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.981216] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241153, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.983318] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 993.984245] env[61898]: INFO nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Took 8.42 seconds to spawn the instance on the hypervisor. 
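The task records in this stretch all follow one lifecycle: a vSphere call such as CopyVirtualDisk_Task or PowerOffVM_Task returns a handle like {'value': 'task-1241153', '_type': 'Task'}, wait_for_task polls it, _poll_task reports 'progress is N%', and the final record carries a 'duration_secs' once it completed successfully. The following is a minimal Python sketch of that poll-until-done pattern; wait_for_task, make_fake_reader and the task states here are illustrative stand-ins, not the oslo.vmware implementation.

import time

def wait_for_task(read_task_info, task_ref, poll_interval=0.5):
    """Poll task_ref until it succeeds, mirroring the 'progress is N%' ...
    'completed successfully' lifecycle in the surrounding records.
    read_task_info is whatever callable fetches task state from vCenter."""
    start = time.monotonic()
    while True:
        info = read_task_info(task_ref)
        if info["state"] == "success":
            return {"id": task_ref, "duration_secs": round(time.monotonic() - start, 6)}
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        # queued or running: report progress and try again
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

# Fake reader for demonstration: reports 0% once, then success, roughly the
# trace task-1241152 (CopyVirtualDisk_Task) leaves in the records around here.
def make_fake_reader():
    states = iter([{"state": "running", "progress": 0}, {"state": "success"}])
    return lambda task_ref: next(states)

print(wait_for_task(make_fake_reader(), "task-1241152", poll_interval=0.01))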
[ 993.984245] env[61898]: DEBUG nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 993.987480] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848782e4-3389-459e-84ef-ffe21c1d90f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.128209] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473494} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.128605] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 2fe9d97d-57e0-4b08-968b-4bb97a610fbb/2fe9d97d-57e0-4b08-968b-4bb97a610fbb.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.128828] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.129095] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e8d3d20-5fef-47cb-b71f-5af3f979de36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.136117] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 994.136117] env[61898]: value = "task-1241154" [ 994.136117] env[61898]: _type = "Task" [ 994.136117] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.144746] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241154, 'name': ExtendVirtualDisk_Task} progress is 0%. 
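The 'Extending root virtual disk to 1048576' step above lines up with the flavors used in this run, whose root_gb is 1: the extend call takes its target capacity in KiB, so a 1 GiB root disk comes out as 1024 * 1024 = 1048576. A small illustrative conversion follows; root_disk_capacity_kb is a hypothetical helper, not a Nova function.

def root_disk_capacity_kb(root_gb: int) -> int:
    """Convert a flavor's root_gb into the KiB target seen in the extend record."""
    KIB_PER_GIB = 1024 * 1024          # 1 GiB = 1048576 KiB
    return root_gb * KIB_PER_GIB

assert root_disk_capacity_kb(1) == 1048576   # the value logged for instance 2fe9d97d-...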
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.213920] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.214307] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7195fc6-9f86-4811-8652-f80a704d011f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.221224] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 994.221224] env[61898]: value = "task-1241155" [ 994.221224] env[61898]: _type = "Task" [ 994.221224] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.238424] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.252668] env[61898]: DEBUG oslo_concurrency.lockutils [None req-6cb902cd-6bd3-4d27-9c71-d9acb09a2ace tempest-ServersNegativeTestJSON-996594428 tempest-ServersNegativeTestJSON-996594428-project-member] Lock "45b8dc91-b577-4548-bf3a-32c7c936c616" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.269s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.467025] env[61898]: WARNING nova.compute.manager [None req-1bfb4b77-f46f-4575-8446-91f37ec07193 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Image not found during snapshot: nova.exception.ImageNotFound: Image cbad67c6-1ae6-4cb9-b818-a91c0d9862a3 could not be found. [ 994.478891] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241153, 'name': PowerOffVM_Task, 'duration_secs': 0.306911} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.479272] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.479574] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 17 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.492551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.511016] env[61898]: INFO nova.compute.manager [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Took 20.15 seconds to build instance. [ 994.648625] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070421} completed successfully. 
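The lockutils records in this section all share one shape: 'Acquiring lock X by Y', then 'acquired ... waited N s', then '"released" ... held N s'. The sketch below reproduces just that timing-and-logging shape with a plain threading.Lock; timed_lock is a hypothetical helper, and oslo.concurrency's real decorator additionally supports fair ordering and external file-based locks.

import threading
import time
from contextlib import contextmanager

_locks = {}                      # one named lock per resource, e.g. "compute_resources"

@contextmanager
def timed_lock(name, caller):
    """Log waited/held durations in the style of the lockutils records above."""
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')

with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.05)             # stand-in for the tracked critical section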
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.649045] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 994.650187] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d91935c-23cc-4bb0-a5cd-b673263cdc6d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.673528] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 2fe9d97d-57e0-4b08-968b-4bb97a610fbb/2fe9d97d-57e0-4b08-968b-4bb97a610fbb.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 994.674249] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e04101c7-df2c-47cc-bfc8-e7e137c072d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.696426] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 994.696426] env[61898]: value = "task-1241156" [ 994.696426] env[61898]: _type = "Task" [ 994.696426] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.706106] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241156, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.736158] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Applying migration context for instance 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c as it has an incoming, in-progress migration eead421f-5e2c-42f2-b3f8-23cd94168a40. Migration status is migrating {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 994.736447] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Applying migration context for instance c26c4add-728c-45ea-8465-7c4273b7d97b as it has an incoming, in-progress migration 8c71a041-862c-4011-981e-e1a3c351dce3. 
Migration status is post-migrating {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 994.737693] env[61898]: INFO nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating resource usage from migration eead421f-5e2c-42f2-b3f8-23cd94168a40 [ 994.738019] env[61898]: INFO nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating resource usage from migration 8c71a041-862c-4011-981e-e1a3c351dce3 [ 994.740534] env[61898]: DEBUG oslo_vmware.api [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241155, 'name': PowerOnVM_Task, 'duration_secs': 0.504104} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.741187] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 994.741275] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-efe87730-a3f1-4069-a901-a21c28a9309c tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance 'c26c4add-728c-45ea-8465-7c4273b7d97b' progress to 100 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 994.765927] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 320577e5-f197-4f66-a94f-9b9ba2479325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.766101] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.766258] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 4522f4ef-c8f6-4fe1-acd5-796f87f22839 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 994.766375] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767194] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7eb0d534-90c8-439d-a894-3f03151ac74b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767343] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 587c9997-3b6d-4654-9cf3-f181833c0728 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767466] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Migration 8c71a041-862c-4011-981e-e1a3c351dce3 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 994.767614] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance c26c4add-728c-45ea-8465-7c4273b7d97b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767801] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance d0184b78-1525-44a4-a515-3eeb34a59cde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767849] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 2fe9d97d-57e0-4b08-968b-4bb97a610fbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.767954] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Migration eead421f-5e2c-42f2-b3f8-23cd94168a40 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 994.768077] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.938425] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "7eb0d534-90c8-439d-a894-3f03151ac74b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.938760] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.938978] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.939194] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.939369] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.942925] env[61898]: INFO nova.compute.manager [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Terminating instance [ 994.986517] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.986789] env[61898]: DEBUG 
nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.986946] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.987180] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.987340] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.987493] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.987698] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.987860] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.988044] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.988293] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.988492] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.993940] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1df0265-b94b-4507-a410-0cc8ca749cb4 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.011115] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 995.011115] env[61898]: value = "task-1241157" [ 995.011115] env[61898]: _type = "Task" [ 995.011115] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.014427] env[61898]: DEBUG oslo_concurrency.lockutils [None req-250f98f5-1a2f-4c3a-82f2-cc2caeaedd39 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.662s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 995.020324] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241157, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.209105] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241156, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.272037] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 523a29df-e21d-4e38-9437-ebcdd7012f57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 995.272416] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 995.272651] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 995.448019] env[61898]: DEBUG nova.compute.manager [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Start destroying the instance on the hypervisor. 
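The resource-tracker walk that finishes a few records above sorts every placement allocation against what this host is actually running: tracked instances and in-progress migrations keep their allocations, an instance that is merely scheduled here is skipped, and a consumer the host no longer manages is skipped 'because we do not know what to do'. A condensed sketch of that decision table follows; classify_allocation is hypothetical and is not Nova's _remove_deleted_instances_allocations.

def classify_allocation(consumer_id, tracked_instances, active_migrations, scheduled_only):
    """Condensed decision table behind the resource-tracker records above.
    Arguments are plain sets of UUIDs; this is a sketch, not Nova's code."""
    if consumer_id in active_migrations:
        return "migration active on this host: keep allocation"
    if consumer_id in tracked_instances:
        return "instance actively managed here: keep allocation"
    if consumer_id in scheduled_only:
        return "scheduled but not yet started: skip heal"
    return "not managed here but allocation references this host: skip heal"

# UUIDs taken from the surrounding records
tracked = {"c26c4add-728c-45ea-8465-7c4273b7d97b", "d0184b78-1525-44a4-a515-3eeb34a59cde"}
migrations = {"8c71a041-862c-4011-981e-e1a3c351dce3", "eead421f-5e2c-42f2-b3f8-23cd94168a40"}
scheduled = {"523a29df-e21d-4e38-9437-ebcdd7012f57"}
for consumer in ("d0184b78-1525-44a4-a515-3eeb34a59cde",
                 "4522f4ef-c8f6-4fe1-acd5-796f87f22839",
                 "523a29df-e21d-4e38-9437-ebcdd7012f57"):
    print(consumer, "->", classify_allocation(consumer, tracked, migrations, scheduled))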
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 995.448172] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.449331] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e3e5e3-11e0-47d6-aa61-0e9d2c37b244 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.463761] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.464205] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8a67592-edc0-42e7-b07d-b3ce5c2cba46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.473589] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 995.473589] env[61898]: value = "task-1241158" [ 995.473589] env[61898]: _type = "Task" [ 995.473589] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.478076] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5787b1d7-31d7-4387-8c12-ec0bafbeca8b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.488105] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241158, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.490939] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f40bbf7-4ade-4662-8e3c-505b761e0c06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.525603] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f6fbc9-ff8a-4b96-937b-b59a3fd989ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.536226] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241157, 'name': ReconfigVM_Task, 'duration_secs': 0.467143} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.538927] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 33 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 995.545305] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a493bddf-99fe-4849-a9c6-7dd574ce5783 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.562722] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.581933] env[61898]: DEBUG nova.compute.manager [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Received event network-changed-630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 995.581991] env[61898]: DEBUG nova.compute.manager [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Refreshing instance network info cache due to event network-changed-630c2b2d-b17e-470f-ad5f-506c4734d40c. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 995.582270] env[61898]: DEBUG oslo_concurrency.lockutils [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.582371] env[61898]: DEBUG oslo_concurrency.lockutils [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.582538] env[61898]: DEBUG nova.network.neutron [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Refreshing network info cache for port 630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.707722] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241156, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.983997] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241158, 'name': PowerOffVM_Task, 'duration_secs': 0.228618} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.984130] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.984305] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.984710] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2adbe83c-a807-4a67-b3c8-81e58076b145 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.052921] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 996.053310] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 996.053509] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.053762] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 996.053987] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.054208] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
996.054555] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 996.054790] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 996.054976] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 996.055167] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 996.055382] env[61898]: DEBUG nova.virt.hardware [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 996.061253] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 996.062669] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20ed0c1b-a64e-4163-b95c-93a3ba1a8891 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.077027] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 996.077260] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 996.077457] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleting the datastore file [datastore2] 7eb0d534-90c8-439d-a894-3f03151ac74b {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 996.078680] env[61898]: DEBUG nova.scheduler.client.report [None 
req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 996.082323] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f584856-d658-42d7-9481-cf7fe56c0773 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.098350] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 996.098350] env[61898]: value = "task-1241160" [ 996.098350] env[61898]: _type = "Task" [ 996.098350] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.105937] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for the task: (returnval){ [ 996.105937] env[61898]: value = "task-1241161" [ 996.105937] env[61898]: _type = "Task" [ 996.105937] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.113093] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241160, 'name': ReconfigVM_Task} progress is 10%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.121760] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.212743] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241156, 'name': ReconfigVM_Task, 'duration_secs': 1.0893} completed successfully. 
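The inventory payload at the start of this block ({'VCPU': {'total': 48, 'reserved': 0, ..., 'allocation_ratio': 4.0}, ...}) is what sits behind the earlier 'Total usable vcpus: 48, total allocated vcpus: 11' summary. Placement's usable capacity per resource class is commonly computed as (total - reserved) * allocation_ratio; the sketch below applies that arithmetic to the logged inventory and should be read as an illustration, not as placement's exact code path.

def usable_capacity(inventory):
    """Apply (total - reserved) * allocation_ratio to each resource class."""
    return {rc: int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
            for rc, inv in inventory.items()}

inventory = {   # copied from the provider 79886f75-... record above
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
print(usable_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}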
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.213195] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 2fe9d97d-57e0-4b08-968b-4bb97a610fbb/2fe9d97d-57e0-4b08-968b-4bb97a610fbb.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.213901] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37567165-4314-4c58-87f4-87c07ac0742c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.222630] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 996.222630] env[61898]: value = "task-1241162" [ 996.222630] env[61898]: _type = "Task" [ 996.222630] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.235721] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241162, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.413339] env[61898]: DEBUG nova.network.neutron [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updated VIF entry in instance network info cache for port 630c2b2d-b17e-470f-ad5f-506c4734d40c. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.414848] env[61898]: DEBUG nova.network.neutron [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.584865] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 996.584865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.867s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.584865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.186s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.585863] env[61898]: INFO nova.compute.claims [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.611025] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241160, 'name': ReconfigVM_Task, 'duration_secs': 0.211385} completed successfully. 
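The instance_info_cache entry for port 630c2b2d-b17e-470f-ad5f-506c4734d40c above is a deeply nested structure that buries the addresses most readers look for. The snippet below reparses an abridged copy of that entry; summarize_vif is a hypothetical helper and the dict is trimmed to the fields it touches.

# Abridged copy of the cached VIF shown in the record above.
vif = {
    "id": "630c2b2d-b17e-470f-ad5f-506c4734d40c",
    "address": "fa:16:3e:f7:46:22",
    "devname": "tap630c2b2d-b1",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.5", "type": "fixed",
                     "floating_ips": [{"address": "10.180.180.230", "type": "floating"}]}],
        }],
    },
}

def summarize_vif(vif):
    """Pull port id, MAC, device name and the fixed/floating addresses out of one VIF."""
    subnets = vif["network"]["subnets"]
    fixed = [ip["address"] for s in subnets for ip in s["ips"]]
    floating = [f["address"] for s in subnets for ip in s["ips"]
                for f in ip.get("floating_ips", [])]
    return {"port_id": vif["id"], "mac": vif["address"], "device": vif["devname"],
            "fixed_ips": fixed, "floating_ips": floating}

print(summarize_vif(vif))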
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.613573] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 996.615180] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0907e24d-8c78-4f56-af79-769039434200 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.625245] env[61898]: DEBUG oslo_vmware.api [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Task: {'id': task-1241161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223464} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.637659] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 996.637659] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 996.637853] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 996.638033] env[61898]: INFO nova.compute.manager [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 996.639358] env[61898]: DEBUG oslo.service.loopingcall [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
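The looping-call record above ('Waiting for function ..._deallocate_network_with_retries to return') wraps network deallocation in a retry loop so a transient Neutron failure does not strand the teardown of instance 7eb0d534-90c8-439d-a894-3f03151ac74b. Below is a generic retry sketch in that spirit; call_with_retries and deallocate_network_stub are hypothetical, and this is not the oslo.service loopingcall API.

import time

def call_with_retries(fn, attempts=3, delay=1.0, backoff=2.0):
    """Call fn until it succeeds or attempts are exhausted, backing off between tries."""
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:              # real code would retry only specific errors
            last_exc = exc
            if attempt == attempts:
                break
            print(f"attempt {attempt} failed: {exc!r}; retrying in {delay:.2f}s")
            time.sleep(delay)
            delay *= backoff
    raise last_exc

# Stub standing in for the Neutron deallocation call: fails once, then succeeds.
calls = {"n": 0}
def deallocate_network_stub():
    calls["n"] += 1
    if calls["n"] < 2:
        raise RuntimeError("neutron temporarily unavailable")
    return "deallocated"

print(call_with_retries(deallocate_network_stub, attempts=3, delay=0.01))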
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 996.646990] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.646990] env[61898]: DEBUG nova.compute.manager [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 996.646990] env[61898]: DEBUG nova.network.neutron [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 996.649585] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7488dbbc-20b2-4861-97b2-4f6904b0c405 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.670068] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 996.670068] env[61898]: value = "task-1241163" [ 996.670068] env[61898]: _type = "Task" [ 996.670068] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.680115] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241163, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.736758] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241162, 'name': Rename_Task, 'duration_secs': 0.278177} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.736758] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.737479] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-248748a9-86bd-48f7-9eba-f6c048548991 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.745189] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 996.745189] env[61898]: value = "task-1241164" [ 996.745189] env[61898]: _type = "Task" [ 996.745189] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.759014] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.919171] env[61898]: DEBUG oslo_concurrency.lockutils [req-f24b7f48-8c06-4079-83f4-a32dccb3280a req-2d75936b-77df-45bc-996d-15d951a8aaef service nova] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.096560] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.096560] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.097268] env[61898]: DEBUG nova.compute.manager [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Going to confirm migration 3 {{(pid=61898) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 997.180311] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241163, 'name': ReconfigVM_Task, 'duration_secs': 0.366779} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.180664] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c/008bab4f-240b-4cb7-86eb-9b1f01ea6e4c.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 997.180940] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 50 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 997.256572] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241164, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.436723] env[61898]: DEBUG nova.network.neutron [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.639209] env[61898]: DEBUG nova.compute.manager [req-ec76ecd5-05ba-494f-964a-545082120c00 req-9cceba2f-e0d5-41c4-8405-dc7c79917d4f service nova] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Received event network-vif-deleted-7c600ee7-db6f-41e0-b4e1-c7bbb2b917c2 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 997.640459] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.640533] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.640699] env[61898]: DEBUG nova.network.neutron [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 997.640881] env[61898]: DEBUG nova.objects.instance [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'info_cache' on Instance uuid c26c4add-728c-45ea-8465-7c4273b7d97b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.689150] env[61898]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2e48b7-1cab-4d0a-9f87-df3bc575ba8f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.713544] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9221c74-8cdc-4fb6-94ef-bce05cdb1b43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.732378] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 67 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 997.757904] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241164, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.792887] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d110da6-7b09-4340-9717-632c9cf0a607 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.800949] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6e9646-24b4-4561-9874-5f216f8b7a44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.832240] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cfbf384-3168-48f6-b6b7-87b483a465d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.840451] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f70c669-a773-4ded-b776-abe4e22fc0ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.855796] env[61898]: DEBUG nova.compute.provider_tree [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 997.940108] env[61898]: INFO nova.compute.manager [-] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Took 1.29 seconds to deallocate network for instance. [ 998.259227] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241164, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.273838] env[61898]: DEBUG nova.network.neutron [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Port 4d73c23b-b607-471d-a628-1fcb200b386c binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 998.376393] env[61898]: ERROR nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [req-c4dc7153-340b-436e-a94f-400591359c28] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4dc7153-340b-436e-a94f-400591359c28"}]} [ 998.393775] env[61898]: DEBUG nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 998.407672] env[61898]: DEBUG nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 998.407672] env[61898]: DEBUG nova.compute.provider_tree [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.420026] env[61898]: DEBUG nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc 
tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 998.440870] env[61898]: DEBUG nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 998.447258] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.603801] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5a7704-4396-4802-8509-1dc84cea4db0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.612402] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35ca5e4-d2b5-47a8-bddf-cb5942a9ae56 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.642998] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd48f519-d37a-40fd-b350-66962ab8eb87 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.652572] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00690b83-6928-4e5a-8b14-6c0e3c95ab3e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.669335] env[61898]: DEBUG nova.compute.provider_tree [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 998.760996] env[61898]: DEBUG oslo_vmware.api [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241164, 'name': PowerOnVM_Task, 'duration_secs': 1.826951} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.761335] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.761549] env[61898]: INFO nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Took 8.97 seconds to spawn the instance on the hypervisor. [ 998.761828] env[61898]: DEBUG nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 998.762689] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a1993b-9c12-4cdc-ada2-6f76ce1a046f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.924527] env[61898]: DEBUG nova.network.neutron [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [{"id": "fa47b33a-e279-408b-bcd7-9165ff102179", "address": "fa:16:3e:82:72:df", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa47b33a-e2", "ovs_interfaceid": "fa47b33a-e279-408b-bcd7-9165ff102179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.198444] env[61898]: DEBUG nova.scheduler.client.report [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 127 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 999.198749] env[61898]: DEBUG nova.compute.provider_tree [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 127 to 128 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 999.198937] env[61898]: DEBUG nova.compute.provider_tree [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 999.288505] env[61898]: INFO nova.compute.manager [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Took 21.50 seconds to build instance. [ 999.296074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.296332] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.296515] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.330161] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.330403] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 
tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.427557] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-c26c4add-728c-45ea-8465-7c4273b7d97b" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.427841] env[61898]: DEBUG nova.objects.instance [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'migration_context' on Instance uuid c26c4add-728c-45ea-8465-7c4273b7d97b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.704576] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.120s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.705320] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 999.709147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.783s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.709425] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.712058] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.220s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.712364] env[61898]: DEBUG nova.objects.instance [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lazy-loading 'resources' on Instance uuid 587c9997-3b6d-4654-9cf3-f181833c0728 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.735030] env[61898]: INFO nova.scheduler.client.report [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted allocations for instance 4522f4ef-c8f6-4fe1-acd5-796f87f22839 [ 999.798788] env[61898]: DEBUG oslo_concurrency.lockutils [None req-7b5d6fb7-0bee-4f53-ba79-6170320c4dba tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.028s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.833696] env[61898]: DEBUG nova.compute.utils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.931394] env[61898]: DEBUG nova.objects.base [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 999.932430] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fd1971-b0fb-485b-b4a0-6510d933c22b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.954534] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-4ac22d87-6a80-4e37-80b0-cc776a2fb0e1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.960793] env[61898]: DEBUG oslo_vmware.api [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 999.960793] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521480cd-6ecd-8649-52fd-7c934918e45e" [ 999.960793] env[61898]: _type = "Task" [ 999.960793] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.968966] env[61898]: DEBUG oslo_vmware.api [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521480cd-6ecd-8649-52fd-7c934918e45e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.215732] env[61898]: DEBUG nova.compute.utils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1000.220905] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1000.220905] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.242843] env[61898]: DEBUG oslo_concurrency.lockutils [None req-51c48169-190e-4e64-a533-f8fb9a19f526 tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "4522f4ef-c8f6-4fe1-acd5-796f87f22839" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.745s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.287759] env[61898]: DEBUG nova.policy [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e74724b381542e0be0664c9256ecc3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6198f817d1b471483500fe05c9bef3f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1000.335064] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab 
tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.335281] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.335468] env[61898]: DEBUG nova.network.neutron [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.336947] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.392083] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ab6f96-0c47-49ec-828f-aeff6e8f96ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.403026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d371a1b6-1a4f-4cf4-bb0d-c5a6ab488324 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.431677] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1382b8-a3d2-472a-9929-fabcd5f2b19b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.440260] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205b242d-0b45-4b18-be3c-e61ded726325 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.456346] env[61898]: DEBUG nova.compute.provider_tree [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.471544] env[61898]: DEBUG oslo_vmware.api [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521480cd-6ecd-8649-52fd-7c934918e45e, 'name': SearchDatastore_Task, 'duration_secs': 0.011313} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.472569] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.695352] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Successfully created port: fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.720347] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1000.959686] env[61898]: DEBUG nova.scheduler.client.report [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1001.051425] env[61898]: DEBUG nova.network.neutron [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.186054] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.186435] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.186731] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.186977] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.187240] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.189884] env[61898]: INFO nova.compute.manager [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Terminating instance [ 1001.408903] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.409341] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.409725] env[61898]: INFO nova.compute.manager 
[None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attaching volume e8160b82-c406-4acc-af77-282250d77399 to /dev/sdb [ 1001.442819] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177cd42f-6ed3-45a8-9375-fdcd5c96c180 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.450935] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4561588-e744-4ac3-9ea5-39eeadc73cef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.467983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.470109] env[61898]: DEBUG nova.virt.block_device [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating existing volume attachment record: 0f36c3b3-446c-42dd-b70f-ab6dc5132430 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1001.472545] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.025s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.473643] env[61898]: DEBUG nova.objects.instance [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lazy-loading 'resources' on Instance uuid 7eb0d534-90c8-439d-a894-3f03151ac74b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.482705] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.483022] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.483325] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock 
"bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.483505] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.484449] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.485980] env[61898]: INFO nova.compute.manager [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Terminating instance [ 1001.495886] env[61898]: INFO nova.scheduler.client.report [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Deleted allocations for instance 587c9997-3b6d-4654-9cf3-f181833c0728 [ 1001.557226] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.694114] env[61898]: DEBUG nova.compute.manager [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1001.694500] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.695859] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2dd256-de77-4744-b2f7-49959e59e8a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.706167] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.706443] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d1e5311-f95b-425a-ba76-9a1608c9520b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.713989] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1001.713989] env[61898]: value = "task-1241166" [ 1001.713989] env[61898]: _type = "Task" [ 1001.713989] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.724922] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241166, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.729351] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1001.760241] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1001.760570] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1001.760767] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.760964] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1001.761133] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.761313] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1001.761555] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1001.761739] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
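The nova.virt.hardware DEBUG records immediately above and below trace how the m1.nano flavor (1 vCPU, no cpu_sockets/cpu_cores/cpu_threads limits in flavor or image) collapses to a single 1-socket/1-core/1-thread topology: the "0:0:0" limits are treated as "unbounded" and replaced by the 65536 defaults, and the only factorization of 1 vCPU within those bounds is 1:1:1. The following is a minimal standalone sketch of that narrowing step, written for illustration only; the function and type names here are invented for the example and are not Nova's actual nova.virt.hardware implementation.

    # Illustrative sketch (not Nova's code): why a 1-vCPU flavor with no
    # explicit CPU-topology limits yields exactly one possible topology,
    # VirtCPUTopology(cores=1, sockets=1, threads=1), as logged above/below.
    from collections import namedtuple
    from itertools import product

    Topology = namedtuple("Topology", "sockets cores threads")

    # "Flavor limits 0:0:0" / "Image limits 0:0:0" mean "no limit"; the log
    # shows them being replaced by a large default maximum (65536 here).
    MAX_DEFAULT = 65536

    def possible_topologies(vcpus, max_sockets=MAX_DEFAULT,
                            max_cores=MAX_DEFAULT, max_threads=MAX_DEFAULT):
        """Enumerate socket/core/thread splits whose product equals vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield Topology(s, c, t)

    if __name__ == "__main__":
        topos = list(possible_topologies(1))
        # Matches "Got 1 possible topologies" and
        # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
        print(topos)  # [Topology(sockets=1, cores=1, threads=1)]

Running the sketch prints a single (1, 1, 1) topology, which is consistent with the "Got 1 possible topologies" and "Sorted desired topologies" records that follow for instance 523a29df-e21d-4e38-9437-ebcdd7012f57.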
[ 1001.761932] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1001.762117] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1001.762320] env[61898]: DEBUG nova.virt.hardware [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.763278] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3efc2dd-7f91-4235-91d9-9ff8c3cb07c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.773187] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25216607-f03f-4db7-ba83-a444b535a1f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.990265] env[61898]: DEBUG nova.compute.manager [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1001.990518] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.991493] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fde76f-c32e-4c75-9d20-043a05524839 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.004921] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.005454] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0e92a617-3f9f-4693-aba7-1e30bd31df71 tempest-InstanceActionsNegativeTestJSON-1123879777 tempest-InstanceActionsNegativeTestJSON-1123879777-project-member] Lock "587c9997-3b6d-4654-9cf3-f181833c0728" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.915s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.006374] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-934ecde9-64f4-4e7b-a378-22697165420e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.018511] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 1002.018511] env[61898]: value = "task-1241167" [ 1002.018511] env[61898]: _type = "Task" [ 1002.018511] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.029580] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241167, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.084987] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0b4dad-c715-44e8-92d4-3c7f3096f0ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.112700] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25cd3f93-6ec0-47b8-b1d8-0e98845f2a1f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.121662] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 83 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1002.167355] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a446b34b-f930-4707-8d66-49ec6acba248 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.177051] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45020d69-af2a-4d36-b63e-384ea188af9d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.211405] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994ae5cd-91fe-4251-b0db-00e34d9a8fc1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.221268] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83353ad-5278-4b1b-961a-135b96967564 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.228763] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241166, 'name': PowerOffVM_Task, 'duration_secs': 0.215674} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.229265] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.232958] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.232958] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d06d8a52-b0e2-4d20-99d1-d80d173bf695 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.240552] env[61898]: DEBUG nova.compute.provider_tree [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.313539] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.313786] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.313972] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore1] 2fe9d97d-57e0-4b08-968b-4bb97a610fbb {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.314611] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39afc43a-3bfd-4271-9b9d-830e200f74db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.322279] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1002.322279] env[61898]: value = "task-1241169" [ 1002.322279] env[61898]: _type = "Task" [ 1002.322279] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.330979] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241169, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.337686] env[61898]: DEBUG nova.compute.manager [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1002.337921] env[61898]: DEBUG oslo_concurrency.lockutils [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.338231] env[61898]: DEBUG oslo_concurrency.lockutils [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.338545] env[61898]: DEBUG oslo_concurrency.lockutils [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.338657] env[61898]: DEBUG nova.compute.manager [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] No waiting events found dispatching network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.338854] env[61898]: WARNING nova.compute.manager [req-43bb477e-9ba3-4c8e-aabc-21a56e28082e req-3def4f42-1a27-42e2-ae52-763cd391db8e service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received unexpected event network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 for instance with vm_state building and task_state spawning. [ 1002.497652] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Successfully updated port: fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.531043] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241167, 'name': PowerOffVM_Task, 'duration_secs': 0.198434} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.531339] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.531509] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.531760] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67438504-49e2-4fc1-9598-597c4dd6dc38 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.613378] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.613378] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.613543] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleting the datastore file [datastore1] bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.613863] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bbe072d-df17-429d-99fd-805d6aa131ec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.622149] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for the task: (returnval){ [ 1002.622149] env[61898]: value = "task-1241171" [ 1002.622149] env[61898]: _type = "Task" [ 1002.622149] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.627513] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.627778] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf973b6c-1609-49fd-97cc-4cb05cb41142 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.635398] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241171, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.639196] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 1002.639196] env[61898]: value = "task-1241172" [ 1002.639196] env[61898]: _type = "Task" [ 1002.639196] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.647343] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241172, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.744208] env[61898]: DEBUG nova.scheduler.client.report [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1002.833342] env[61898]: DEBUG oslo_vmware.api [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185994} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.833643] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.833829] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.834018] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.834203] env[61898]: INFO nova.compute.manager [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1002.834446] env[61898]: DEBUG oslo.service.loopingcall [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.834640] env[61898]: DEBUG nova.compute.manager [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1002.834735] env[61898]: DEBUG nova.network.neutron [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.001721] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.001832] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.001920] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1003.134153] env[61898]: DEBUG oslo_vmware.api [None req-5a851e45-d0d8-45de-849f-4b07b8af297c 
tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Task: {'id': task-1241171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280296} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.138033] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.138033] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.138033] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.138033] env[61898]: INFO nova.compute.manager [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1003.138033] env[61898]: DEBUG oslo.service.loopingcall [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.138033] env[61898]: DEBUG nova.compute.manager [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1003.138033] env[61898]: DEBUG nova.network.neutron [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1003.147736] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241172, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.251242] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.252423] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.779s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.290745] env[61898]: INFO nova.scheduler.client.report [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Deleted allocations for instance 7eb0d534-90c8-439d-a894-3f03151ac74b [ 1003.492501] env[61898]: DEBUG nova.compute.manager [req-bde982dc-3783-488a-ac03-a4ebb58592f3 req-c6132faa-7e59-4b6a-a130-e0a563b9aaa4 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Received event network-vif-deleted-62f1251d-f84b-4c28-ab74-971fef0d640f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1003.492707] env[61898]: INFO nova.compute.manager [req-bde982dc-3783-488a-ac03-a4ebb58592f3 req-c6132faa-7e59-4b6a-a130-e0a563b9aaa4 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Neutron deleted interface 62f1251d-f84b-4c28-ab74-971fef0d640f; detaching it from the instance and deleting it from the info cache [ 1003.492883] env[61898]: DEBUG nova.network.neutron [req-bde982dc-3783-488a-ac03-a4ebb58592f3 req-c6132faa-7e59-4b6a-a130-e0a563b9aaa4 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.551768] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.650479] env[61898]: DEBUG oslo_vmware.api [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241172, 'name': PowerOnVM_Task, 'duration_secs': 0.603363} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.650803] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.650991] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c2912544-8880-4a79-853a-ac9715a9e0ab tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance '008bab4f-240b-4cb7-86eb-9b1f01ea6e4c' progress to 100 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1003.725546] env[61898]: DEBUG nova.network.neutron [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.783927] env[61898]: DEBUG nova.network.neutron [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.798658] env[61898]: DEBUG oslo_concurrency.lockutils [None req-363be356-8f4c-448a-ac17-09cd121e5987 tempest-ImagesTestJSON-1589173607 tempest-ImagesTestJSON-1589173607-project-member] Lock "7eb0d534-90c8-439d-a894-3f03151ac74b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.860s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.912011] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e14640d-31e2-43fd-a78e-9485bcd1ed54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.921032] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6a8c9d76-66fc-4842-9c6c-ff18246c8970 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.956536] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a421a738-aa02-4ca6-8eea-f16407f91ab1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.963282] env[61898]: DEBUG nova.network.neutron [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.970418] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33d3aaf-6fbb-449d-b7e0-934bf194da57 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.990513] env[61898]: DEBUG nova.compute.provider_tree [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.995281] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55ae7578-feef-4697-88f5-a8b4ff2ccd34 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.006380] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9d273a-357b-44dd-99cf-cd855d5c73b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.036510] env[61898]: DEBUG nova.compute.manager [req-bde982dc-3783-488a-ac03-a4ebb58592f3 req-c6132faa-7e59-4b6a-a130-e0a563b9aaa4 service nova] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Detach interface failed, port_id=62f1251d-f84b-4c28-ab74-971fef0d640f, reason: Instance bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1004.229213] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.229594] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance network_info: |[{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1004.230057] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:b0:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc927434-188b-4c42-9200-bcb870385a25', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.237625] env[61898]: DEBUG oslo.service.loopingcall [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.237898] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.238687] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78d96bfd-7e83-417c-8e28-3376f9bb51dc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.261932] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.261932] env[61898]: value = "task-1241174" [ 1004.261932] env[61898]: _type = "Task" [ 1004.261932] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.270637] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241174, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.286321] env[61898]: INFO nova.compute.manager [-] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Took 1.45 seconds to deallocate network for instance. [ 1004.464137] env[61898]: DEBUG nova.compute.manager [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-changed-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1004.464496] env[61898]: DEBUG nova.compute.manager [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing instance network info cache due to event network-changed-fc927434-188b-4c42-9200-bcb870385a25. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1004.464852] env[61898]: DEBUG oslo_concurrency.lockutils [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.465125] env[61898]: DEBUG oslo_concurrency.lockutils [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.465410] env[61898]: DEBUG nova.network.neutron [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing network info cache for port fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.468926] env[61898]: INFO nova.compute.manager [-] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Took 1.33 seconds to deallocate network for instance. 
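The records above are dominated by one recurring pattern: a vSphere method such as PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task or CreateVM_Task is invoked through oslo.vmware, the returned Task reference is handed to wait_for_task, and _poll_task then logs "progress is N%" until the task reports success. The following is a minimal illustrative sketch of that pattern, not part of the captured log; the host, credentials and managed-object reference are placeholders invented for the example and do not come from this deployment.

from oslo_vmware import api, vim_util

# Session setup corresponds to the VMwareAPISession creation logged at service
# start-up; all values below are placeholders.
session = api.VMwareAPISession(
    'vcenter.example.org',   # placeholder vCenter host
    'administrator',         # placeholder username
    'secret',                # placeholder password
    10,                      # api_retry_count
    0.5)                     # task_poll_interval: how often the task is re-polled

# PowerOffVM_Task returns a Task managed-object reference; wait_for_task()
# polls it (the _poll_task "progress is N%" lines) until it reaches 'success'
# ("completed successfully") or raises on error.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref value
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)

The same wait loop drives the CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries later in this capture.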
[ 1004.493631] env[61898]: DEBUG nova.scheduler.client.report [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1004.772666] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241174, 'name': CreateVM_Task, 'duration_secs': 0.401172} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.772839] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.773765] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1004.773968] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.774358] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1004.774595] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49dae4ad-9067-461a-bfa2-a653c9e87e9c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.779744] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1004.779744] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5254187c-b895-41d9-2b4a-6d3c45749f8d" [ 1004.779744] env[61898]: _type = "Task" [ 1004.779744] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.788441] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5254187c-b895-41d9-2b4a-6d3c45749f8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.793250] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.977500] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.176023] env[61898]: DEBUG nova.network.neutron [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updated VIF entry in instance network info cache for port fc927434-188b-4c42-9200-bcb870385a25. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.177130] env[61898]: DEBUG nova.network.neutron [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.291557] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': 
session[52794197-29f0-ea69-2b8e-12812988d1d1]5254187c-b895-41d9-2b4a-6d3c45749f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009798} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.291887] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.292147] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.292411] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1005.292600] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.292969] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.293124] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-049929fd-307f-439e-8a1a-d8d662362207 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.303216] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.303420] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.304239] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f2d167a-55af-4f99-bb5f-b0372c4c765b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.310658] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1005.310658] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52518620-3db8-22cb-bca0-f5f443b5488f" [ 1005.310658] env[61898]: _type = "Task" [ 1005.310658] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.319917] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52518620-3db8-22cb-bca0-f5f443b5488f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.504468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.252s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1005.507301] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.714s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.507515] env[61898]: DEBUG nova.objects.instance [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid 2fe9d97d-57e0-4b08-968b-4bb97a610fbb {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.680997] env[61898]: DEBUG oslo_concurrency.lockutils [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1005.681319] env[61898]: DEBUG nova.compute.manager [req-09650d99-9936-4b62-9eca-cd0d77309fff req-fad8746f-3c60-4639-b082-e1faf0af8d87 service nova] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Received event network-vif-deleted-dc9bbcfc-c612-44f6-a5bf-686cfcf5fb32 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1005.822367] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52518620-3db8-22cb-bca0-f5f443b5488f, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009393} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.822947] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bf48a1b-ad35-43a2-9ddc-c1a64a633c05 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.828810] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1005.828810] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5216e8c9-eb96-ee00-df64-467263bba46d" [ 1005.828810] env[61898]: _type = "Task" [ 1005.828810] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.837602] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5216e8c9-eb96-ee00-df64-467263bba46d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.027491] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Volume attach. Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1006.027818] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267714', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'name': 'volume-e8160b82-c406-4acc-af77-282250d77399', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'serial': 'e8160b82-c406-4acc-af77-282250d77399'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1006.028799] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5c8d7e-726e-4ea6-9165-3b67c97de431 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.065606] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e7a5cb-a2e2-4aad-9ed9-3e9478684da9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.073493] env[61898]: INFO nova.scheduler.client.report [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocation for migration 8c71a041-862c-4011-981e-e1a3c351dce3 [ 1006.094818] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None 
req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-e8160b82-c406-4acc-af77-282250d77399/volume-e8160b82-c406-4acc-af77-282250d77399.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.100059] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-877af9f4-b349-4c94-9ed3-2636903e7902 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.123081] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1006.123081] env[61898]: value = "task-1241175" [ 1006.123081] env[61898]: _type = "Task" [ 1006.123081] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.135720] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241175, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.210452] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ec012c-7084-4a60-82b6-457ce1085423 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.218475] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79691f20-b6a7-41e2-baf8-6b98a1e2ee59 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.250716] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.250983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.251237] env[61898]: DEBUG nova.compute.manager [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Going to confirm migration 4 {{(pid=61898) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5112}} [ 1006.255192] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c096f5f-25fd-4333-ab3d-0fdc452f86a1 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.266922] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2b510e-9d46-4fe0-b6eb-df0fd5161f41 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.285144] env[61898]: DEBUG nova.compute.provider_tree [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.340164] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5216e8c9-eb96-ee00-df64-467263bba46d, 'name': SearchDatastore_Task, 'duration_secs': 0.009631} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.340453] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1006.340712] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.340972] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30d309f5-393b-4ba5-9c69-6cefbe76e2ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.348697] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1006.348697] env[61898]: value = "task-1241176" [ 1006.348697] env[61898]: _type = "Task" [ 1006.348697] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.357079] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241176, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.617050] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0269fe06-08c3-4ed4-8a09-ef840a9f9193 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.520s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.635988] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241175, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.788532] env[61898]: DEBUG nova.scheduler.client.report [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1006.796392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.796392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.796392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.796392] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.796392] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.796982] env[61898]: INFO nova.compute.manager [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Terminating instance [ 1006.858771] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470109} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.859255] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.859444] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.859764] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5cc5e751-955e-43d4-83fa-75647f85f0c0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.867623] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1006.867623] env[61898]: value = "task-1241177" [ 1006.867623] env[61898]: _type = "Task" [ 1006.867623] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.875791] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241177, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.878408] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.878683] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquired lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.878765] env[61898]: DEBUG nova.network.neutron [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1006.878952] env[61898]: DEBUG nova.objects.instance [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'info_cache' on Instance uuid 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.134130] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241175, 'name': ReconfigVM_Task, 'duration_secs': 0.599481} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.134427] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-e8160b82-c406-4acc-af77-282250d77399/volume-e8160b82-c406-4acc-af77-282250d77399.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.139188] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eff138b8-3b5e-46b2-8974-106296152b6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.155182] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1007.155182] env[61898]: value = "task-1241178" [ 1007.155182] env[61898]: _type = "Task" [ 1007.155182] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.164102] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241178, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.282390] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "6fdd2128-9823-4a64-a49a-9f327d63994d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.282644] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.295984] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.298482] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.321s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.298718] env[61898]: DEBUG nova.objects.instance [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lazy-loading 'resources' on Instance uuid bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.304069] env[61898]: DEBUG nova.compute.manager [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1007.304286] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.306864] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e91785-350f-4aa8-b489-47da75d146e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.314432] env[61898]: INFO nova.scheduler.client.report [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance 2fe9d97d-57e0-4b08-968b-4bb97a610fbb [ 1007.318517] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.320463] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfe9ffbe-cb04-4e85-90e3-793a38f59a79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.330016] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1007.330016] env[61898]: value = "task-1241179" [ 1007.330016] env[61898]: _type = "Task" [ 1007.330016] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.340457] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241179, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.379496] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0659} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.379830] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.380700] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e9f8d5-e246-4d44-babd-7504472c7622 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.413195] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.413779] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bcafebe-5c60-4ce6-9c02-dfd460cef445 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.437340] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1007.437340] env[61898]: value = "task-1241180" [ 1007.437340] env[61898]: _type = "Task" [ 1007.437340] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.453102] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.668903] env[61898]: DEBUG oslo_vmware.api [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241178, 'name': ReconfigVM_Task, 'duration_secs': 0.184755} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.669270] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267714', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'name': 'volume-e8160b82-c406-4acc-af77-282250d77399', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'serial': 'e8160b82-c406-4acc-af77-282250d77399'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1007.785667] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1007.827805] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c0cc29cb-b10f-4c40-b6de-687db2d51e50 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "2fe9d97d-57e0-4b08-968b-4bb97a610fbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.641s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.849030] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241179, 'name': PowerOffVM_Task, 'duration_secs': 0.212363} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.852052] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.852313] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.853087] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f57b88c9-a05a-4de2-bb79-14369676f579 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.927903] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.928236] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.928591] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore2] c26c4add-728c-45ea-8465-7c4273b7d97b {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.931806] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b00203dc-8743-4aed-9e6f-0b46ab39edb7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.944024] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1007.944024] env[61898]: value = "task-1241182" [ 1007.944024] env[61898]: _type = "Task" [ 1007.944024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.953794] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241180, 'name': ReconfigVM_Task, 'duration_secs': 0.343991} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.954706] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.955446] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e42f1a40-b55a-4df5-996f-4937b2b9182a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.960471] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.968866] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1007.968866] env[61898]: value = "task-1241183" [ 1007.968866] env[61898]: _type = "Task" [ 1007.968866] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.978867] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241183, 'name': Rename_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.983970] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5c3cd6-4336-447a-b5b0-5cfd6e55bdaa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.989035] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ab26d1-597f-4050-b529-9d47cba1d7b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.028748] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b106abd3-066e-47ca-93ac-db93392acb06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.037738] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdf7337-260f-45d1-932f-91ce329422d7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.055989] env[61898]: DEBUG nova.compute.provider_tree [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1008.264296] env[61898]: DEBUG nova.network.neutron [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [{"id": "4d73c23b-b607-471d-a628-1fcb200b386c", "address": "fa:16:3e:da:c8:ce", "network": {"id": "aa46595d-80ac-4fd4-8230-34d352129d45", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1079963010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c2e835a924c438287e7626c34c2fb05", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69f65356-c85e-4b7f-ad28-7c7b5e8cf50c", "external-id": "nsx-vlan-transportzone-281", "segmentation_id": 281, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d73c23b-b6", "ovs_interfaceid": "4d73c23b-b607-471d-a628-1fcb200b386c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.306409] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.459806] env[61898]: DEBUG oslo_vmware.api [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219401} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.459806] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.459806] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.459806] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.459806] env[61898]: INFO nova.compute.manager [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1008.460051] env[61898]: DEBUG oslo.service.loopingcall [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.460281] env[61898]: DEBUG nova.compute.manager [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1008.460330] env[61898]: DEBUG nova.network.neutron [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.478979] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241183, 'name': Rename_Task, 'duration_secs': 0.183095} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.479438] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.479691] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14c93a09-49eb-4bc2-a8e0-ef1951c10816 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.490427] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1008.490427] env[61898]: value = "task-1241184" [ 1008.490427] env[61898]: _type = "Task" [ 1008.490427] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.499764] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.582646] env[61898]: ERROR nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] [req-4410b5e0-ec95-4a10-b00d-03a77b971a30] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4410b5e0-ec95-4a10-b00d-03a77b971a30"}]} [ 1008.607216] env[61898]: DEBUG nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1008.628019] env[61898]: DEBUG nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1008.628019] env[61898]: DEBUG nova.compute.provider_tree [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1008.640863] env[61898]: DEBUG nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1008.660351] env[61898]: DEBUG nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1008.724215] env[61898]: DEBUG nova.objects.instance [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.770175] env[61898]: DEBUG oslo_concurrency.lockutils [None 
req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Releasing lock "refresh_cache-008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1008.770175] env[61898]: DEBUG nova.objects.instance [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lazy-loading 'migration_context' on Instance uuid 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.820672] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98658047-e04b-4134-92ef-d6d045e089a6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.830648] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67041448-26d3-45e8-b000-38aa9e4f8082 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.864147] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764ee1ba-d82e-4270-b302-126102a2194c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.872864] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f531aaf5-f032-41fc-b79b-aba29d906f47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.894856] env[61898]: DEBUG nova.compute.provider_tree [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.003316] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241184, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.058589] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "d4189084-f73f-4857-a418-6eb7f5b90d83" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.059227] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.140912] env[61898]: DEBUG nova.compute.manager [req-c6857a2c-2662-47a1-8559-607e5e3b7181 req-b0ca997b-b4b4-4932-8962-7706e5b8f225 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Received event network-vif-deleted-fa47b33a-e279-408b-bcd7-9165ff102179 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1009.141137] env[61898]: INFO nova.compute.manager [req-c6857a2c-2662-47a1-8559-607e5e3b7181 req-b0ca997b-b4b4-4932-8962-7706e5b8f225 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Neutron deleted interface fa47b33a-e279-408b-bcd7-9165ff102179; detaching it from the instance and deleting it from the info cache [ 1009.141316] env[61898]: DEBUG nova.network.neutron [req-c6857a2c-2662-47a1-8559-607e5e3b7181 req-b0ca997b-b4b4-4932-8962-7706e5b8f225 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.233191] env[61898]: DEBUG oslo_concurrency.lockutils [None req-287b2212-34f0-4528-a7dd-06bdd7c42cc6 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.823s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.273660] env[61898]: DEBUG nova.objects.base [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Object Instance<008bab4f-240b-4cb7-86eb-9b1f01ea6e4c> lazy-loaded attributes: info_cache,migration_context {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1009.274678] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b917d7e-2765-49b1-8080-42c1c9e5e432 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.305030] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84cebe93-f27f-4afa-9779-01a591fa4d9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.311511] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f 
tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 1009.311511] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9ff4c-a522-b219-aab6-0046840d5d26" [ 1009.311511] env[61898]: _type = "Task" [ 1009.311511] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.321967] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9ff4c-a522-b219-aab6-0046840d5d26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.398424] env[61898]: DEBUG nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1009.456388] env[61898]: DEBUG nova.network.neutron [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.501190] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241184, 'name': PowerOnVM_Task} progress is 89%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.564429] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1009.648239] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca7d2531-6d29-4679-9d92-9920d30c26af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.663861] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238e1056-2b9f-43e3-a59c-59b57a08efd5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.694304] env[61898]: DEBUG nova.compute.manager [req-c6857a2c-2662-47a1-8559-607e5e3b7181 req-b0ca997b-b4b4-4932-8962-7706e5b8f225 service nova] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Detach interface failed, port_id=fa47b33a-e279-408b-bcd7-9165ff102179, reason: Instance c26c4add-728c-45ea-8465-7c4273b7d97b could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1009.821808] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f9ff4c-a522-b219-aab6-0046840d5d26, 'name': SearchDatastore_Task, 'duration_secs': 0.012735} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.822377] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.903332] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.605s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.905762] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.599s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.907306] env[61898]: INFO nova.compute.claims [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.935051] env[61898]: INFO nova.scheduler.client.report [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Deleted allocations for instance bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52 [ 1009.960370] env[61898]: INFO nova.compute.manager [-] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Took 1.50 seconds to deallocate network for instance. [ 1010.000964] env[61898]: DEBUG oslo_vmware.api [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241184, 'name': PowerOnVM_Task, 'duration_secs': 1.165083} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.001449] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.001637] env[61898]: INFO nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1010.001673] env[61898]: DEBUG nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1010.003059] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5817f3-4e12-4b40-99d4-20d769ff7001 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.092458] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.443640] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5a851e45-d0d8-45de-849f-4b07b8af297c tempest-AttachInterfacesTestJSON-132278315 tempest-AttachInterfacesTestJSON-132278315-project-member] Lock "bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.960s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.468283] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.522026] env[61898]: INFO nova.compute.manager [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Took 21.14 seconds to build instance. 
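Annotation (not part of the captured log): the spawn sequence recorded above for instance 523a29df-e21d-4e38-9437-ebcdd7012f57 (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven through oslo.vmware: each vSphere call returns an asynchronous task object, and wait_for_task polls its TaskInfo, which is what produces the recurring "progress is N%" and "completed successfully" records. Below is a minimal, hedged sketch of that pattern outside Nova; the vCenter host, credentials and the managed-object reference value are placeholders I introduced, not values from this log, and the spec-building done by the real driver is omitted.

# Hypothetical sketch of the oslo.vmware task pattern seen in the log above.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',          # assumed vCenter host
    'administrator@vsphere.local',   # assumed user
    'secret',                        # assumed password
    10,                              # api_retry_count
    0.5)                             # task_poll_interval in seconds

# Build a managed object reference for an existing VM (assumed moref value).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start the asynchronous vSphere task, then block until TaskInfo reaches a
# terminal state; oslo.vmware polls the task (emitting the "progress is N%"
# debug lines), returns the TaskInfo on success and raises on error.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
assert task_info.state == 'success'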
[ 1010.761090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.761343] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.024069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ba2c3812-e692-414c-b71a-88bd2c5462dc tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.651s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.025755] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d2aa77-51cc-49f3-8d80-4293464a80ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.034317] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9372aaf0-be90-45f8-8f97-89497119ada6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.068766] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13cbbd2-a7b4-40ff-9757-a4f8112699f8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.077047] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18ed4ae-a56d-4fc4-9d8d-7f16ed263f78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.090343] env[61898]: DEBUG nova.compute.provider_tree [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.264921] env[61898]: DEBUG nova.compute.utils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1011.593645] env[61898]: DEBUG nova.scheduler.client.report [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1011.768053] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.098960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.099586] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1012.102349] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.280s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.606984] env[61898]: DEBUG nova.compute.utils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1012.610023] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1012.610283] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1012.650072] env[61898]: DEBUG nova.policy [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a910d0cdf3cd4b17af818abd25a38b79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3ce0562f486e44cc877c1cc31525a13a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1012.739731] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27e7844-ae85-40d4-bdbb-4179dfdb2223 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.746591] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29074ae6-cd31-467d-a83e-7e4e9da4d3b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.778483] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7832d30-46ff-4cae-83f3-57d9790597de {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.787360] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e991b7-aca0-4393-86a1-4a7b7c22513f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.802197] env[61898]: DEBUG nova.compute.provider_tree [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.830842] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.831141] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1012.831399] env[61898]: INFO nova.compute.manager [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attaching volume 35f6f4d6-c25a-40c2-b190-500885a5b85e to /dev/sdc [ 1012.881772] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7a6c39-f2ee-409f-8315-b2f0bea1aa36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.890202] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a364d63-355f-4b95-aa0a-3f4c1c8ecd88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.906356] env[61898]: DEBUG nova.virt.block_device [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating existing volume attachment record: 55e3665a-0290-4496-ae66-0332af3831a6 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1012.910816] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Successfully created port: 8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1013.110704] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1013.305404] env[61898]: DEBUG nova.scheduler.client.report [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1014.120021] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1014.160490] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1014.160490] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1014.160490] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1014.160490] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1014.160490] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1014.160780] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1014.160845] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1014.161019] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1014.161198] env[61898]: DEBUG 
nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1014.161367] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1014.161543] env[61898]: DEBUG nova.virt.hardware [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1014.162456] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47596289-1ae6-4c48-96d2-68597105881c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.171784] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64395494-00aa-43e1-bdd0-d9af80060a58 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.320538] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.218s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.324849] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.232s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.326585] env[61898]: INFO nova.compute.claims [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.350520] env[61898]: DEBUG nova.compute.manager [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-changed-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1014.350734] env[61898]: DEBUG nova.compute.manager [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing instance network info cache due to event network-changed-fc927434-188b-4c42-9200-bcb870385a25. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1014.350952] env[61898]: DEBUG oslo_concurrency.lockutils [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.352323] env[61898]: DEBUG oslo_concurrency.lockutils [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.352518] env[61898]: DEBUG nova.network.neutron [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing network info cache for port fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.379172] env[61898]: DEBUG nova.compute.manager [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Received event network-vif-plugged-8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1014.379172] env[61898]: DEBUG oslo_concurrency.lockutils [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] Acquiring lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.379172] env[61898]: DEBUG oslo_concurrency.lockutils [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.379172] env[61898]: DEBUG oslo_concurrency.lockutils [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.379172] env[61898]: DEBUG nova.compute.manager [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] No waiting events found dispatching network-vif-plugged-8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.379172] env[61898]: WARNING nova.compute.manager [req-a29dccb4-526f-425a-b670-0fef0cadeb76 req-491b155c-a72d-4706-9563-afa6944d0b9f service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Received unexpected event network-vif-plugged-8ec141e6-276b-46c5-a0a1-047d2b513e9e for instance with vm_state building and task_state spawning. 
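The req-a29dccb4 block above is Nova's external-event handshake with Neutron for port 8ec141e6: the compute manager registers a waiter for a "network-vif-plugged-<port>" event before it expects the VIF to come up, and the incoming notification pops that waiter; when the event arrives while no waiter is registered, the manager logs "No waiting events found" followed by the "Received unexpected event" warning, which is typically harmless while the instance is still in the building/spawning state. The register/pop pattern can be sketched roughly as below with threading.Event; InstanceEvents and its method names here are illustrative placeholders, not Nova's actual classes.

import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents(object):
    """Simplified external-event registry, keyed by (instance, event name)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        # Called before the operation that will trigger the event (VIF plug).
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        # Called when the external event is delivered via the compute API.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return False
        waiter.set()
        return True


# Happy-path usage: the waiter is registered first, then released by the event.
events = InstanceEvents()
w = events.prepare_for_event("<instance-uuid>", "network-vif-plugged-<port-id>")
events.pop_event("<instance-uuid>", "network-vif-plugged-<port-id>")
assert w.wait(timeout=1.0)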
[ 1014.895256] env[61898]: INFO nova.scheduler.client.report [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocation for migration eead421f-5e2c-42f2-b3f8-23cd94168a40 [ 1014.910111] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Successfully updated port: 8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1015.148939] env[61898]: DEBUG nova.compute.manager [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Received event network-changed-8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1015.148939] env[61898]: DEBUG nova.compute.manager [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Refreshing instance network info cache due to event network-changed-8ec141e6-276b-46c5-a0a1-047d2b513e9e. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1015.148939] env[61898]: DEBUG oslo_concurrency.lockutils [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] Acquiring lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.148939] env[61898]: DEBUG oslo_concurrency.lockutils [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] Acquired lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.148939] env[61898]: DEBUG nova.network.neutron [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Refreshing network info cache for port 8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.297670] env[61898]: DEBUG nova.network.neutron [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updated VIF entry in instance network info cache for port fc927434-188b-4c42-9200-bcb870385a25. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.298084] env[61898]: DEBUG nova.network.neutron [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.403489] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.152s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.414552] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1015.465620] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e4db72-180c-4799-bd79-424ecf802034 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.478017] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c408f901-e7e8-46ac-8796-e7fe68af4f64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.509911] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c62d86-7227-41a8-af42-d2daf8f13f93 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.518660] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2043540-fd1f-477d-9695-8cf4aea5842e {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.533208] env[61898]: DEBUG nova.compute.provider_tree [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.686853] env[61898]: DEBUG nova.network.neutron [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.801234] env[61898]: DEBUG oslo_concurrency.lockutils [req-24f548b5-3f3c-4ed1-a9f7-55fe924e0a89 req-124989be-0074-4f7c-a71e-72ebcb1760ef service nova] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.834538] env[61898]: DEBUG nova.network.neutron [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.038261] env[61898]: DEBUG nova.scheduler.client.report [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1016.337045] env[61898]: DEBUG oslo_concurrency.lockutils [req-4ae06a15-5cb6-48ce-9424-a994b7510a6c req-43faf7fd-82b0-4989-896b-dea656db2d2d service nova] Releasing lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.337442] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.337599] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1016.543492] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.543492] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1016.546637] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.078s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.546637] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.576631] env[61898]: INFO nova.scheduler.client.report [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocations for instance c26c4add-728c-45ea-8465-7c4273b7d97b [ 1016.768485] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.768485] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.768485] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.768485] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.768828] env[61898]: 
DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.771052] env[61898]: INFO nova.compute.manager [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Terminating instance [ 1016.882424] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1017.051291] env[61898]: DEBUG nova.compute.utils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1017.051905] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1017.052181] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1017.079676] env[61898]: DEBUG nova.network.neutron [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Updating instance_info_cache with network_info: [{"id": "8ec141e6-276b-46c5-a0a1-047d2b513e9e", "address": "fa:16:3e:32:cf:4e", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ec141e6-27", "ovs_interfaceid": "8ec141e6-276b-46c5-a0a1-047d2b513e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.087588] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cd8afd49-04f1-4f46-b2d2-a05446f0a6ab tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "c26c4add-728c-45ea-8465-7c4273b7d97b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.295s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.121036] env[61898]: DEBUG nova.policy [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f25c631adf3c4f68b374a35b767a9429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30bd396aa1ff45ad946bc1a6fdb3b40b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1017.274201] env[61898]: DEBUG nova.compute.manager [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1017.274485] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1017.275422] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6809cef3-ea3d-4ea1-85de-104402ee5b07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.284111] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1017.284367] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f385e88-d869-45c1-885c-89ee5ff548f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.290939] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 1017.290939] env[61898]: value = "task-1241187" [ 1017.290939] env[61898]: _type = "Task" [ 1017.290939] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.299963] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.463780] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Volume attach. Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1017.463780] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267716', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'name': 'volume-35f6f4d6-c25a-40c2-b190-500885a5b85e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'serial': '35f6f4d6-c25a-40c2-b190-500885a5b85e'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1017.464415] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4c5045-434c-4b41-9a53-4da9bc43e06f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.483245] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8723f697-0f4a-48b8-a247-baf9a711087b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.516618] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-35f6f4d6-c25a-40c2-b190-500885a5b85e/volume-35f6f4d6-c25a-40c2-b190-500885a5b85e.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.517767] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Successfully created port: 76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1017.520943] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1df896fc-f7fc-4004-a662-2a4f5e9f7047 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.544706] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 
tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1017.544706] env[61898]: value = "task-1241188" [ 1017.544706] env[61898]: _type = "Task" [ 1017.544706] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.553477] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241188, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.557214] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1017.583112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "refresh_cache-6fdd2128-9823-4a64-a49a-9f327d63994d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1017.583112] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance network_info: |[{"id": "8ec141e6-276b-46c5-a0a1-047d2b513e9e", "address": "fa:16:3e:32:cf:4e", "network": {"id": "1b72c921-cd73-4dd1-b61a-5128f3c34982", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-971074604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3ce0562f486e44cc877c1cc31525a13a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8ec141e6-27", "ovs_interfaceid": "8ec141e6-276b-46c5-a0a1-047d2b513e9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1017.583473] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:cf:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8ec141e6-276b-46c5-a0a1-047d2b513e9e', 'vif_model': 'vmxnet3'}] 
{{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1017.591441] env[61898]: DEBUG oslo.service.loopingcall [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1017.595065] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1017.595065] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27f1a3ed-bd6a-4d15-887c-c3952c9d6b23 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.615974] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1017.615974] env[61898]: value = "task-1241189" [ 1017.615974] env[61898]: _type = "Task" [ 1017.615974] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.626455] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241189, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.801902] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241187, 'name': PowerOffVM_Task, 'duration_secs': 0.221156} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.802220] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1017.802430] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1017.802707] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76034817-b2f1-4a5c-968d-c51d9610b3ff {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.879978] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1017.880250] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1017.880444] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleting the datastore file [datastore2] 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1017.880764] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3901a04-2b86-4931-88c0-3791ea996092 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.890389] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for the task: (returnval){ [ 1017.890389] env[61898]: value = "task-1241191" [ 1017.890389] env[61898]: _type = "Task" [ 1017.890389] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.899054] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241191, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.055017] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241188, 'name': ReconfigVM_Task, 'duration_secs': 0.485191} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.055662] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-35f6f4d6-c25a-40c2-b190-500885a5b85e/volume-35f6f4d6-c25a-40c2-b190-500885a5b85e.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.063156] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad5ad4e7-9a5c-4139-bc6c-1b62b81d8b8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.081693] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1018.081693] env[61898]: value = "task-1241192" [ 1018.081693] env[61898]: _type = "Task" [ 1018.081693] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.090747] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241192, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.126763] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241189, 'name': CreateVM_Task, 'duration_secs': 0.360304} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.127016] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1018.127742] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.127997] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.128373] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1018.128942] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7b66f5e-6974-4d95-a9be-af19c51b3b92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.133806] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1018.133806] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52953819-778a-4815-690d-c3eda09e71fd" [ 1018.133806] env[61898]: _type = "Task" [ 1018.133806] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.142577] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52953819-778a-4815-690d-c3eda09e71fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.400294] env[61898]: DEBUG oslo_vmware.api [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Task: {'id': task-1241191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18568} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.400650] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1018.400877] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1018.401105] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1018.401335] env[61898]: INFO nova.compute.manager [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1018.401588] env[61898]: DEBUG oslo.service.loopingcall [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1018.401784] env[61898]: DEBUG nova.compute.manager [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1018.401873] env[61898]: DEBUG nova.network.neutron [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1018.576071] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1018.591946] env[61898]: DEBUG oslo_vmware.api [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241192, 'name': ReconfigVM_Task, 'duration_secs': 0.280695} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.592290] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267716', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'name': 'volume-35f6f4d6-c25a-40c2-b190-500885a5b85e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'serial': '35f6f4d6-c25a-40c2-b190-500885a5b85e'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1018.602911] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1018.603027] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1018.603183] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1018.603341] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1018.603492] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1018.603639] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1018.603845] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1018.604038] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1018.604197] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1018.604362] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1018.604537] env[61898]: DEBUG nova.virt.hardware [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.605424] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6dace4-5a1b-474a-b032-505f5cc0171c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.616436] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52229292-7076-49a2-8b91-fb86a57291ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.644523] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52953819-778a-4815-690d-c3eda09e71fd, 'name': SearchDatastore_Task, 'duration_secs': 0.009895} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.644844] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.645096] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.645337] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.645487] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.645661] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.645929] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f09d9a6-d01e-465c-a6f7-6798da1ab070 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.652990] env[61898]: DEBUG nova.compute.manager [req-45b6ecf6-fa3d-4717-9036-12baab3dc8b3 req-c5c4195a-8a2a-4e9c-bae3-ff2176aba0cf service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Received event network-vif-deleted-4d73c23b-b607-471d-a628-1fcb200b386c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1018.653208] env[61898]: INFO nova.compute.manager [req-45b6ecf6-fa3d-4717-9036-12baab3dc8b3 req-c5c4195a-8a2a-4e9c-bae3-ff2176aba0cf service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Neutron deleted interface 4d73c23b-b607-471d-a628-1fcb200b386c; detaching it from the instance and deleting it from the info cache [ 1018.653451] env[61898]: DEBUG nova.network.neutron [req-45b6ecf6-fa3d-4717-9036-12baab3dc8b3 req-c5c4195a-8a2a-4e9c-bae3-ff2176aba0cf service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.660839] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.661027] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.661998] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ed3b6c6-3dd9-4e5b-ad6b-2a7dea13cf82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.668490] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1018.668490] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d3363e-d965-8107-68f8-ee0427e2a542" [ 1018.668490] env[61898]: _type = "Task" [ 1018.668490] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.679124] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d3363e-d965-8107-68f8-ee0427e2a542, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.967593] env[61898]: DEBUG nova.compute.manager [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Received event network-vif-plugged-76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1018.967748] env[61898]: DEBUG oslo_concurrency.lockutils [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] Acquiring lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.968062] env[61898]: DEBUG oslo_concurrency.lockutils [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.968291] env[61898]: DEBUG oslo_concurrency.lockutils [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.968591] env[61898]: DEBUG nova.compute.manager [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] No waiting events found dispatching network-vif-plugged-76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1018.968814] env[61898]: WARNING nova.compute.manager [req-e20eedfe-fd0a-4ae8-bf8e-bb90d946d5b1 req-50335ea1-122a-473b-addf-82f60d99fc3a service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Received unexpected event network-vif-plugged-76d39d4e-c2b6-4f4a-a186-f426561711ea for instance with vm_state building and task_state spawning. 
[ 1019.055126] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Successfully updated port: 76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1019.122056] env[61898]: DEBUG nova.network.neutron [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.156033] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed9b438f-35ac-40e8-8513-bf4bc07738b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.165765] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee658f5f-7371-4e04-b7d5-ec8c6fa78754 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.184646] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d3363e-d965-8107-68f8-ee0427e2a542, 'name': SearchDatastore_Task, 'duration_secs': 0.033776} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.185454] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7c2266-61a6-4bfa-8e46-4810294db628 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.191190] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1019.191190] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f6bb31-5205-1f5f-a887-f18d2d9ebecb" [ 1019.191190] env[61898]: _type = "Task" [ 1019.191190] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.197659] env[61898]: DEBUG nova.compute.manager [req-45b6ecf6-fa3d-4717-9036-12baab3dc8b3 req-c5c4195a-8a2a-4e9c-bae3-ff2176aba0cf service nova] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Detach interface failed, port_id=4d73c23b-b607-471d-a628-1fcb200b386c, reason: Instance 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1019.205677] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f6bb31-5205-1f5f-a887-f18d2d9ebecb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.557387] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1019.557616] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.557789] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1019.626093] env[61898]: INFO nova.compute.manager [-] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Took 1.22 seconds to deallocate network for instance. [ 1019.630883] env[61898]: DEBUG nova.objects.instance [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.707618] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f6bb31-5205-1f5f-a887-f18d2d9ebecb, 'name': SearchDatastore_Task, 'duration_secs': 0.01889} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.707862] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.708205] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d/6fdd2128-9823-4a64-a49a-9f327d63994d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1019.708931] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-770f47d8-c15b-4211-8a1a-7e21de23cd8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.716568] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1019.716568] env[61898]: value = "task-1241193" [ 1019.716568] env[61898]: _type = "Task" [ 1019.716568] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.725097] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.093843] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1020.136652] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.136974] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.137139] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.142040] env[61898]: DEBUG oslo_concurrency.lockutils [None req-3118eb31-c9fc-413b-9be2-972b107199ae tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.311s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.162645] env[61898]: INFO nova.scheduler.client.report [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Deleted allocations for instance 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c [ 1020.227252] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44516} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.227529] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d/6fdd2128-9823-4a64-a49a-9f327d63994d.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.227750] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.228040] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a46b0c4-d6b7-462a-a8e6-a03ec9def4d2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.234890] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1020.234890] env[61898]: value = "task-1241194" [ 1020.234890] env[61898]: _type = "Task" [ 1020.234890] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.242979] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241194, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.243840] env[61898]: DEBUG nova.network.neutron [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Updating instance_info_cache with network_info: [{"id": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "address": "fa:16:3e:98:05:14", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d39d4e-c2", "ovs_interfaceid": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.477943] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.478274] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.674165] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1013f697-a6c4-40db-82e5-5d69541ee38f tempest-DeleteServersTestJSON-1541309247 tempest-DeleteServersTestJSON-1541309247-project-member] Lock "008bab4f-240b-4cb7-86eb-9b1f01ea6e4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.906s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.745359] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186753} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.745982] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.746324] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Instance network_info: |[{"id": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "address": "fa:16:3e:98:05:14", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d39d4e-c2", "ovs_interfaceid": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1020.746609] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1020.747060] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:05:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4223acd2-30f7-440e-b975-60b30d931694', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76d39d4e-c2b6-4f4a-a186-f426561711ea', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.754608] env[61898]: DEBUG oslo.service.loopingcall [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.755294] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a8b39d-0b12-49be-ba39-c0f7bd1e37f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.758156] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.758389] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9825b78d-d05b-4f5d-a43c-52b91e1dc986 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.794479] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d/6fdd2128-9823-4a64-a49a-9f327d63994d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1020.795957] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edfb37a6-7a01-40f1-9de0-d8b105845629 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.814061] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.814061] env[61898]: value = "task-1241195" [ 1020.814061] env[61898]: _type = "Task" [ 1020.814061] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.816893] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1020.816893] env[61898]: value = "task-1241196" [ 1020.816893] env[61898]: _type = "Task" [ 1020.816893] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.823282] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241195, 'name': CreateVM_Task} progress is 15%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.828312] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241196, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.982214] env[61898]: INFO nova.compute.manager [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Detaching volume e8160b82-c406-4acc-af77-282250d77399 [ 1020.997374] env[61898]: DEBUG nova.compute.manager [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Received event network-changed-76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1020.997690] env[61898]: DEBUG nova.compute.manager [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Refreshing instance network info cache due to event network-changed-76d39d4e-c2b6-4f4a-a186-f426561711ea. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1020.997690] env[61898]: DEBUG oslo_concurrency.lockutils [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] Acquiring lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.997809] env[61898]: DEBUG oslo_concurrency.lockutils [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] Acquired lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.998059] env[61898]: DEBUG nova.network.neutron [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Refreshing network info cache for port 76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1021.019982] env[61898]: INFO nova.virt.block_device [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attempting to driver detach volume e8160b82-c406-4acc-af77-282250d77399 from mountpoint /dev/sdb [ 1021.020288] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1021.021053] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267714', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'name': 'volume-e8160b82-c406-4acc-af77-282250d77399', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'serial': 'e8160b82-c406-4acc-af77-282250d77399'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1021.021443] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ca4a72-74f1-497b-b5d2-ad549fa3b3dd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.050247] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9895f6db-e519-4f9d-b4bf-174728165717 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.059749] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede92c5f-0816-4425-a80a-7a19e503bbeb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.085623] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb58309-cbbb-4593-9e93-0e3bdec36818 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.101845] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] The volume has not been displaced from its original location: [datastore2] volume-e8160b82-c406-4acc-af77-282250d77399/volume-e8160b82-c406-4acc-af77-282250d77399.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1021.107483] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1021.107881] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c681f46-0919-48a8-bb2d-33f8371d722a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.128597] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1021.128597] env[61898]: value = "task-1241197" [ 1021.128597] env[61898]: _type = "Task" [ 1021.128597] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.137124] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241197, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.324659] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241195, 'name': CreateVM_Task, 'duration_secs': 0.394444} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.325285] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1021.326116] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.326211] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.326520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1021.326785] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea84276b-7b99-49ea-a99a-61730d5bdb03 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.331962] env[61898]: 
DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241196, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.339639] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1021.339639] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e727ca-b390-4f68-623b-de7030f5f7b7" [ 1021.339639] env[61898]: _type = "Task" [ 1021.339639] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.351666] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e727ca-b390-4f68-623b-de7030f5f7b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.640780] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241197, 'name': ReconfigVM_Task, 'duration_secs': 0.257612} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.641091] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1021.646597] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10720e0f-d9d0-4b12-ba06-5f8b41c24637 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.664025] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1021.664025] env[61898]: value = "task-1241198" [ 1021.664025] env[61898]: _type = "Task" [ 1021.664025] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.676439] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241198, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.702943] env[61898]: DEBUG nova.network.neutron [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Updated VIF entry in instance network info cache for port 76d39d4e-c2b6-4f4a-a186-f426561711ea. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1021.703638] env[61898]: DEBUG nova.network.neutron [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Updating instance_info_cache with network_info: [{"id": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "address": "fa:16:3e:98:05:14", "network": {"id": "496c3601-c04f-4883-b13b-8da53676ae98", "bridge": "br-int", "label": "tempest-ServersTestJSON-1343085709-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "30bd396aa1ff45ad946bc1a6fdb3b40b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4223acd2-30f7-440e-b975-60b30d931694", "external-id": "nsx-vlan-transportzone-647", "segmentation_id": 647, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76d39d4e-c2", "ovs_interfaceid": "76d39d4e-c2b6-4f4a-a186-f426561711ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.828207] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241196, 'name': ReconfigVM_Task, 'duration_secs': 0.528547} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.828544] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d/6fdd2128-9823-4a64-a49a-9f327d63994d.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1021.829212] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6421561-b504-4ea2-8a6e-c90a756c4dab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.840338] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1021.840338] env[61898]: value = "task-1241199" [ 1021.840338] env[61898]: _type = "Task" [ 1021.840338] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.853802] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241199, 'name': Rename_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.857335] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e727ca-b390-4f68-623b-de7030f5f7b7, 'name': SearchDatastore_Task, 'duration_secs': 0.056799} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.857622] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.857891] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.858151] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.858312] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.858487] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.858758] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1904853-5d4a-4f46-aa1f-1c9854ca5cb4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.868647] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.868915] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.869647] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed93ba5c-303d-4454-ae24-0434d7e5f37f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.876431] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1021.876431] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52896799-c319-da6b-0ee1-e235c1dbbb51" [ 1021.876431] env[61898]: _type = "Task" [ 1021.876431] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.884808] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52896799-c319-da6b-0ee1-e235c1dbbb51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.174837] env[61898]: DEBUG oslo_vmware.api [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241198, 'name': ReconfigVM_Task, 'duration_secs': 0.155304} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.175312] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267714', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'name': 'volume-e8160b82-c406-4acc-af77-282250d77399', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e8160b82-c406-4acc-af77-282250d77399', 'serial': 'e8160b82-c406-4acc-af77-282250d77399'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1022.206790] env[61898]: DEBUG oslo_concurrency.lockutils [req-6e9e4c2d-cf4d-4c00-b76e-62d7469fba02 req-6514a8ec-30d8-431a-82e1-dfa9b69ac9d1 service nova] Releasing lock "refresh_cache-d4189084-f73f-4857-a418-6eb7f5b90d83" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.354482] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241199, 'name': Rename_Task, 'duration_secs': 0.409673} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.354868] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.355212] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67481e2d-5ada-4fe0-b4c0-a808182d59b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.361711] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1022.361711] env[61898]: value = "task-1241200" [ 1022.361711] env[61898]: _type = "Task" [ 1022.361711] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.370125] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.386297] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52896799-c319-da6b-0ee1-e235c1dbbb51, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.387051] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d6e8332-682e-428b-915b-41b3acc7070a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.399024] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1022.399024] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52231a15-9b02-c685-e690-5d3756b3741a" [ 1022.399024] env[61898]: _type = "Task" [ 1022.399024] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.405819] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52231a15-9b02-c685-e690-5d3756b3741a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.726018] env[61898]: DEBUG nova.objects.instance [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.874256] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241200, 'name': PowerOnVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.907400] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52231a15-9b02-c685-e690-5d3756b3741a, 'name': SearchDatastore_Task, 'duration_secs': 0.010431} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.907647] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.907958] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83/d4189084-f73f-4857-a418-6eb7f5b90d83.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.908239] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a020a422-8bb7-4aa8-b8ea-5c9bc8668ff1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.916767] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1022.916767] env[61898]: value = "task-1241201" [ 1022.916767] env[61898]: _type = "Task" [ 1022.916767] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.928066] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241201, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.373755] env[61898]: DEBUG oslo_vmware.api [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241200, 'name': PowerOnVM_Task, 'duration_secs': 0.623642} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.374042] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.374326] env[61898]: INFO nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1023.374519] env[61898]: DEBUG nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1023.375319] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de4ee89-4291-4a21-b807-fb0b67f608f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.427201] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462702} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.427902] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83/d4189084-f73f-4857-a418-6eb7f5b90d83.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1023.427902] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1023.428119] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a7343bd-8561-4f82-b235-5b2209df162b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.436118] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1023.436118] env[61898]: value = "task-1241202" [ 1023.436118] env[61898]: _type = "Task" [ 1023.436118] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.447552] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241202, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.736297] env[61898]: DEBUG oslo_concurrency.lockutils [None req-9a9fc8c8-f0c3-4539-8f2f-c4afea7f109f tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.258s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.787433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.787433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.893875] env[61898]: INFO nova.compute.manager [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Took 15.60 seconds to build instance. [ 1023.946434] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241202, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063466} completed successfully. 
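The CopyVirtualDisk_Task and ExtendVirtualDisk_Task calls above are the spawn path copying the cached image VMDK into the new instance's directory and then growing it to the flavor's root size (1048576 KB here). A hedged sketch of those two VirtualDiskManager calls, reusing the session from the earlier sketch; dc_ref is an assumed datacenter reference, while the datastore paths are the ones from the log:

    # Sketch (not Nova's exact code) of the copy-then-extend sequence above.
    disk_mgr = session.vim.service_content.virtualDiskManager
    dc_ref = None  # placeholder; the datacenter ref is normally looked up per datastore

    src = ('[datastore2] devstack-image-cache_base/'
           'e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk')
    dst = '[datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83/d4189084-f73f-4857-a418-6eb7f5b90d83.vmdk'

    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                     name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)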
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.946716] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.947520] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d16d6b-32d1-4503-979c-25fa117b68af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.969902] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83/d4189084-f73f-4857-a418-6eb7f5b90d83.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.970235] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1aa82066-da5b-4aa3-89f8-7dda04a34de2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.992606] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1023.992606] env[61898]: value = "task-1241203" [ 1023.992606] env[61898]: _type = "Task" [ 1023.992606] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.001177] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241203, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.290633] env[61898]: INFO nova.compute.manager [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Detaching volume 35f6f4d6-c25a-40c2-b190-500885a5b85e [ 1024.331244] env[61898]: INFO nova.virt.block_device [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Attempting to driver detach volume 35f6f4d6-c25a-40c2-b190-500885a5b85e from mountpoint /dev/sdc [ 1024.331554] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1024.331749] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267716', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'name': 'volume-35f6f4d6-c25a-40c2-b190-500885a5b85e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'serial': '35f6f4d6-c25a-40c2-b190-500885a5b85e'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1024.333016] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98edae3-a6a4-4352-8c49-40bde77a0562 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.359550] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa773b8-d59c-49e8-bca7-2b64ede43e81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.369037] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c044a50-fc02-44d4-aa53-be184874d1ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.393523] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7994090-9ddf-4524-aa05-457453c44911 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.396861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c977e794-d13f-4bbd-adf1-90f9e95dce05 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.114s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.415313] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] The volume has not been displaced from its original location: [datastore2] volume-35f6f4d6-c25a-40c2-b190-500885a5b85e/volume-35f6f4d6-c25a-40c2-b190-500885a5b85e.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1024.421273] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfiguring VM instance instance-0000005f to detach disk 2002 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1024.421273] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e789bbb4-485d-422d-b398-c77c6637c3a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.443909] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1024.443909] env[61898]: value = "task-1241204" [ 1024.443909] env[61898]: _type = "Task" [ 1024.443909] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.455368] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241204, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.504122] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241203, 'name': ReconfigVM_Task, 'duration_secs': 0.324004} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.504536] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Reconfigured VM instance instance-00000069 to attach disk [datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83/d4189084-f73f-4857-a418-6eb7f5b90d83.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.505552] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7fdcc567-6256-41a6-b623-60ba356b15e6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.514826] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1024.514826] env[61898]: value = "task-1241205" [ 1024.514826] env[61898]: _type = "Task" [ 1024.514826] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.525381] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241205, 'name': Rename_Task} progress is 0%. 
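"Reconfiguring VM instance instance-0000005f to detach disk 2002" above is a ReconfigVM_Task whose config spec essentially removes the attached VirtualDisk (a device change with operation 'remove'). A rough sketch of building such a spec through the session's suds factory; vm_ref and disk_device are assumed to have been looked up already:

    # Hedged sketch of a detach-disk ReconfigVM_Task like the one above.
    vm_ref = disk_device = None  # placeholders for the VM ref and its VirtualDisk device

    cf = session.vim.client.factory                      # suds object factory
    dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'remove'
    dev_change.device = disk_device

    spec = cf.create('ns0:VirtualMachineConfigSpec')
    spec.deviceChange = [dev_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
    session.wait_for_task(task)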
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.957493] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241204, 'name': ReconfigVM_Task, 'duration_secs': 0.307401} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.957940] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Reconfigured VM instance instance-0000005f to detach disk 2002 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1024.963547] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18b929cd-2133-4fff-adcd-d1dccc58aaf2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.985599] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1024.985599] env[61898]: value = "task-1241206" [ 1024.985599] env[61898]: _type = "Task" [ 1024.985599] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.996509] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241206, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.027275] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241205, 'name': Rename_Task, 'duration_secs': 0.148012} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.027700] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.028078] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e436470-3815-464b-bb14-6ec68c19782f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.036367] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1025.036367] env[61898]: value = "task-1241207" [ 1025.036367] env[61898]: _type = "Task" [ 1025.036367] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.049980] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.282377] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "6fdd2128-9823-4a64-a49a-9f327d63994d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.282736] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.283016] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.283276] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.283507] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.285864] env[61898]: INFO nova.compute.manager [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Terminating instance [ 1025.508521] env[61898]: DEBUG oslo_vmware.api [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241206, 'name': ReconfigVM_Task, 'duration_secs': 0.166654} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.508944] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267716', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'name': 'volume-35f6f4d6-c25a-40c2-b190-500885a5b85e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '622326f9-b3c5-452e-b7f6-dfe6de1e7d4b', 'attached_at': '', 'detached_at': '', 'volume_id': '35f6f4d6-c25a-40c2-b190-500885a5b85e', 'serial': '35f6f4d6-c25a-40c2-b190-500885a5b85e'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1025.549853] env[61898]: DEBUG oslo_vmware.api [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241207, 'name': PowerOnVM_Task, 'duration_secs': 0.457414} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.549853] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.549853] env[61898]: INFO nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Took 6.97 seconds to spawn the instance on the hypervisor. [ 1025.549853] env[61898]: DEBUG nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1025.550464] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942f9424-0e0f-4a21-86c2-8fdbd5e05af8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.791108] env[61898]: DEBUG nova.compute.manager [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1025.791662] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.792629] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25265cf-b581-47c9-bf77-2d1cf9c499cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.803366] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.803750] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aaa2c32b-ac2a-46e0-b456-9c9732e51dfd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.811831] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1025.811831] env[61898]: value = "task-1241208" [ 1025.811831] env[61898]: _type = "Task" [ 1025.811831] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.824525] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.065276] env[61898]: DEBUG nova.objects.instance [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'flavor' on Instance uuid 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.073445] env[61898]: INFO nova.compute.manager [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Took 16.00 seconds to build instance. [ 1026.322760] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241208, 'name': PowerOffVM_Task, 'duration_secs': 0.36688} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.323067] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.323244] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.323504] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd222efb-dd62-47d4-9ba1-87b496230204 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.394304] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.394921] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.394921] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleting the datastore file [datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.395342] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f0bfc8e-48b6-43a4-ad10-047770fef0d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.405305] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for the task: (returnval){ [ 1026.405305] env[61898]: value = "task-1241210" [ 1026.405305] env[61898]: _type = "Task" [ 1026.405305] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.414800] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241210, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.576069] env[61898]: DEBUG oslo_concurrency.lockutils [None req-432f1035-1f22-4637-8d35-996c0adba527 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.517s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.916549] env[61898]: DEBUG oslo_vmware.api [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Task: {'id': task-1241210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148047} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.916878] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.917220] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.917286] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.917446] env[61898]: INFO nova.compute.manager [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1026.917766] env[61898]: DEBUG oslo.service.loopingcall [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
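The terminate path above is the usual three-step vSphere teardown: power the VM off, unregister it, then delete its directory from the datastore, after which the compute manager deallocates the instance's Neutron ports. A compressed sketch of those calls, again reusing the earlier session; vm_ref and dc_ref are assumed lookups, and the datastore path is the one from the log:

    # Sketch of the power-off / unregister / delete-datastore-files teardown above.
    vm_ref = dc_ref = None  # placeholders for the VM and datacenter references

    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is a plain method, not a task, so there is nothing to poll.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name='[datastore2] 6fdd2128-9823-4a64-a49a-9f327d63994d',
                              datacenter=dc_ref)
    session.wait_for_task(task)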
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1026.918046] env[61898]: DEBUG nova.compute.manager [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1026.918325] env[61898]: DEBUG nova.network.neutron [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1027.081433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-52f735ac-70d2-4e39-a522-7d0fc24941b7 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.294s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.163096] env[61898]: DEBUG nova.compute.manager [req-d09c05c2-9766-4110-b830-3359ecd59cde req-a3a6a32e-56ee-46c6-993a-c376d1828f8c service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Received event network-vif-deleted-8ec141e6-276b-46c5-a0a1-047d2b513e9e {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1027.163096] env[61898]: INFO nova.compute.manager [req-d09c05c2-9766-4110-b830-3359ecd59cde req-a3a6a32e-56ee-46c6-993a-c376d1828f8c service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Neutron deleted interface 8ec141e6-276b-46c5-a0a1-047d2b513e9e; detaching it from the instance and deleting it from the info cache [ 1027.163257] env[61898]: DEBUG nova.network.neutron [req-d09c05c2-9766-4110-b830-3359ecd59cde req-a3a6a32e-56ee-46c6-993a-c376d1828f8c service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.185103] env[61898]: DEBUG oslo_concurrency.lockutils [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "d4189084-f73f-4857-a418-6eb7f5b90d83" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.185354] env[61898]: DEBUG oslo_concurrency.lockutils [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.185529] env[61898]: DEBUG nova.compute.manager [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1027.186408] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e6b190-41ef-4c41-b809-952c6465c336 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.193580] env[61898]: DEBUG nova.compute.manager 
[None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=61898) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3403}} [ 1027.194110] env[61898]: DEBUG nova.objects.instance [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'flavor' on Instance uuid d4189084-f73f-4857-a418-6eb7f5b90d83 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1027.355171] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.355790] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.371058] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.371386] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.371963] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.372192] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.372364] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.374573] env[61898]: INFO nova.compute.manager [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Terminating instance [ 1027.642952] env[61898]: DEBUG nova.network.neutron [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.665587] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6921d5f-41be-475f-b944-0e7ab64bd367 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.678051] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f2a050-6565-4de9-8738-d0ab2a181b31 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.710048] env[61898]: DEBUG nova.compute.manager [req-d09c05c2-9766-4110-b830-3359ecd59cde req-a3a6a32e-56ee-46c6-993a-c376d1828f8c service nova] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Detach interface failed, port_id=8ec141e6-276b-46c5-a0a1-047d2b513e9e, reason: Instance 6fdd2128-9823-4a64-a49a-9f327d63994d could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1027.858544] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1027.881458] env[61898]: DEBUG nova.compute.manager [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1027.881760] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.884443] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1b35c9-d435-447f-840b-a5a2d1279142 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.892292] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.892558] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db1a5757-7221-4e25-a6c6-cdba4f289fa4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.899297] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1027.899297] env[61898]: value = "task-1241211" [ 1027.899297] env[61898]: _type = "Task" [ 1027.899297] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.908944] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.146617] env[61898]: INFO nova.compute.manager [-] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Took 1.23 seconds to deallocate network for instance. [ 1028.204045] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.204045] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa1d6b83-f5d0-4af0-a09a-de877b1a584e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.211315] env[61898]: DEBUG oslo_vmware.api [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1028.211315] env[61898]: value = "task-1241212" [ 1028.211315] env[61898]: _type = "Task" [ 1028.211315] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.220364] env[61898]: DEBUG oslo_vmware.api [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.382567] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.382847] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.385950] env[61898]: INFO nova.compute.claims [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.409346] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241211, 'name': PowerOffVM_Task, 'duration_secs': 0.170829} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.409639] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.409809] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.410087] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82b8e087-aeb3-475d-b848-fddcce3b98e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.477065] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.477065] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.477065] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleting the datastore file [datastore1] 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.477335] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63f5fd9c-a5e1-49e4-98ca-8ffc17ae0862 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.483686] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for the task: (returnval){ [ 1028.483686] env[61898]: value = "task-1241214" [ 1028.483686] env[61898]: _type = "Task" [ 1028.483686] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.491493] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241214, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.654745] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.720375] env[61898]: DEBUG oslo_vmware.api [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241212, 'name': PowerOffVM_Task, 'duration_secs': 0.234665} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.720715] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.720963] env[61898]: DEBUG nova.compute.manager [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1028.721767] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57c6aeb-8945-4dd3-b9c5-db87f42a59f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.994129] env[61898]: DEBUG oslo_vmware.api [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Task: {'id': task-1241214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126342} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.994416] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.994604] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.994785] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.994958] env[61898]: INFO nova.compute.manager [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1028.995220] env[61898]: DEBUG oslo.service.loopingcall [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.995418] env[61898]: DEBUG nova.compute.manager [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1028.995514] env[61898]: DEBUG nova.network.neutron [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1029.232533] env[61898]: DEBUG oslo_concurrency.lockutils [None req-692fc0ae-83e0-47ce-921f-123808c6d9d5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.047s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.451177] env[61898]: DEBUG nova.compute.manager [req-52f39e79-d45c-4b1b-b040-be18cf932aa6 req-ce4d9c72-042c-40f6-8b24-838324926fab service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Received event network-vif-deleted-508f7b5a-a3ef-4688-9918-45d566ba903a {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1029.451358] env[61898]: INFO nova.compute.manager [req-52f39e79-d45c-4b1b-b040-be18cf932aa6 req-ce4d9c72-042c-40f6-8b24-838324926fab service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Neutron deleted interface 508f7b5a-a3ef-4688-9918-45d566ba903a; detaching it from the instance and deleting it from the info cache [ 1029.451560] env[61898]: DEBUG nova.network.neutron [req-52f39e79-d45c-4b1b-b040-be18cf932aa6 
req-ce4d9c72-042c-40f6-8b24-838324926fab service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.508168] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5466d9db-6f94-4d9c-8207-d63a3019aae3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.515687] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a296ad-9712-44b0-a599-e349722ab123 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.547836] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0146d7-99f3-434e-8e45-c9e2aa8481a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.553423] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87153b4-328d-4d09-96b3-8bc43f852ea3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.566586] env[61898]: DEBUG nova.compute.provider_tree [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.917064] env[61898]: DEBUG nova.network.neutron [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.957261] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-795865eb-286c-43ea-af1b-547eae8eefbe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.960147] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "d4189084-f73f-4857-a418-6eb7f5b90d83" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.960522] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.960737] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.960922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.961110] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.963516] env[61898]: INFO nova.compute.manager [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Terminating instance [ 1029.970807] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae50ee7d-885e-4b99-bbcb-dd6942fb9532 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.998136] env[61898]: DEBUG nova.compute.manager [req-52f39e79-d45c-4b1b-b040-be18cf932aa6 req-ce4d9c72-042c-40f6-8b24-838324926fab service nova] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Detach interface failed, port_id=508f7b5a-a3ef-4688-9918-45d566ba903a, reason: Instance 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1030.069745] env[61898]: DEBUG nova.scheduler.client.report [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1030.419307] env[61898]: INFO nova.compute.manager [-] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Took 1.42 seconds to deallocate network for instance. [ 1030.467817] env[61898]: DEBUG nova.compute.manager [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1030.468127] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1030.469031] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036de0c1-64c7-4f59-a0b3-b238db974177 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.477107] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1030.477408] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db217a68-82e2-43c3-9294-6772b0955713 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.537190] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1030.537468] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1030.537658] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore2] d4189084-f73f-4857-a418-6eb7f5b90d83 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1030.538018] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bd78e14-4847-4368-b950-9455531cc9ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.544811] env[61898]: DEBUG oslo_vmware.api [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1030.544811] env[61898]: value = "task-1241216" [ 1030.544811] env[61898]: _type = "Task" [ 1030.544811] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.552657] env[61898]: DEBUG oslo_vmware.api [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241216, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.574611] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.575137] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1030.577820] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.923s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.578138] env[61898]: DEBUG nova.objects.instance [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lazy-loading 'resources' on Instance uuid 6fdd2128-9823-4a64-a49a-9f327d63994d {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.926073] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.054469] env[61898]: DEBUG oslo_vmware.api [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123446} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.054747] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1031.054935] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1031.055132] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1031.055310] env[61898]: INFO nova.compute.manager [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1031.055553] env[61898]: DEBUG oslo.service.loopingcall [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1031.055749] env[61898]: DEBUG nova.compute.manager [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1031.055843] env[61898]: DEBUG nova.network.neutron [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1031.082008] env[61898]: DEBUG nova.compute.utils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1031.085974] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1031.086176] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.125775] env[61898]: DEBUG nova.policy [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b53a1aca504e4b7593420e25dd8602f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a5473d225540e186d6778172a187cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1031.181181] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d2b4d0-9545-4b45-819f-9d0d6b1558c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.188800] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51f6dc3-1d1b-449a-ac52-43660b9c1297 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.220103] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b293be44-3cf5-4480-982c-8ea8758aae01 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.227371] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b27cbd-2424-4bfa-b2b2-6f43978778c1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.242166] env[61898]: DEBUG nova.compute.provider_tree [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.416587] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Successfully created port: bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.476369] env[61898]: DEBUG nova.compute.manager [req-4866bf94-6f0a-4a87-9ff3-30c7e2a85fd4 req-14cc7521-fd53-40a8-825f-16c374de98cc service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Received event network-vif-deleted-76d39d4e-c2b6-4f4a-a186-f426561711ea {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1031.476658] env[61898]: INFO nova.compute.manager [req-4866bf94-6f0a-4a87-9ff3-30c7e2a85fd4 
req-14cc7521-fd53-40a8-825f-16c374de98cc service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Neutron deleted interface 76d39d4e-c2b6-4f4a-a186-f426561711ea; detaching it from the instance and deleting it from the info cache [ 1031.476938] env[61898]: DEBUG nova.network.neutron [req-4866bf94-6f0a-4a87-9ff3-30c7e2a85fd4 req-14cc7521-fd53-40a8-825f-16c374de98cc service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.586971] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1031.745363] env[61898]: DEBUG nova.scheduler.client.report [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1031.798794] env[61898]: DEBUG nova.network.neutron [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.980035] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb2712bd-f7c1-4399-867a-6e4ed6bd5f1c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.990022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c91a42b-2d75-46b6-adad-156680faee48 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.017080] env[61898]: DEBUG nova.compute.manager [req-4866bf94-6f0a-4a87-9ff3-30c7e2a85fd4 req-14cc7521-fd53-40a8-825f-16c374de98cc service nova] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Detach interface failed, port_id=76d39d4e-c2b6-4f4a-a186-f426561711ea, reason: Instance d4189084-f73f-4857-a418-6eb7f5b90d83 could not be found. 
{{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1032.250380] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.672s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.253369] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.327s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.253709] env[61898]: DEBUG nova.objects.instance [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lazy-loading 'resources' on Instance uuid 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1032.273867] env[61898]: INFO nova.scheduler.client.report [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Deleted allocations for instance 6fdd2128-9823-4a64-a49a-9f327d63994d [ 1032.301862] env[61898]: INFO nova.compute.manager [-] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Took 1.25 seconds to deallocate network for instance. [ 1032.596500] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1032.624318] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.624615] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.624738] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.624922] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.625093] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.625253] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.625464] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.625627] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.625798] 
env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.625996] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.626256] env[61898]: DEBUG nova.virt.hardware [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.627151] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5b73cb-c6d1-4deb-96d8-0ba42ac801ed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.635457] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5a2bf1-67ca-40aa-910e-fffec49532d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.782261] env[61898]: DEBUG oslo_concurrency.lockutils [None req-17415cd8-006c-4a26-bef7-de7e483989a9 tempest-ServerDiskConfigTestJSON-701470015 tempest-ServerDiskConfigTestJSON-701470015-project-member] Lock "6fdd2128-9823-4a64-a49a-9f327d63994d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.499s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.810091] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.864382] env[61898]: DEBUG nova.compute.manager [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Received event network-vif-plugged-bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1032.865668] env[61898]: DEBUG oslo_concurrency.lockutils [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.866207] env[61898]: DEBUG oslo_concurrency.lockutils [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.866371] env[61898]: DEBUG oslo_concurrency.lockutils [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.866675] env[61898]: DEBUG nova.compute.manager [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] No waiting events found dispatching network-vif-plugged-bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.867057] env[61898]: WARNING nova.compute.manager [req-baac3b84-7be2-4abb-900d-c455ea10f1e4 req-f1ce72ca-ba07-4572-b98f-9f70c808d5b3 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Received unexpected event network-vif-plugged-bd9582d2-676b-45d5-be6a-1883dd40c2ff for instance with vm_state building and task_state spawning. [ 1032.892953] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f83c93e-fe93-4c38-9f03-59f7a0674d18 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.902403] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4067df9b-98f2-48ec-a4b4-bc3dada84f88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.934225] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60077a94-a31a-4772-9d7e-723a831bd24a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.943269] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3aea51-ddda-4cb5-bb7c-17450749c1cb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.959226] env[61898]: DEBUG nova.compute.provider_tree [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.992518] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Successfully updated port: bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.462804] env[61898]: DEBUG nova.scheduler.client.report [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 
1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1033.494859] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.495030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.495216] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1033.967708] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.970710] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.160s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.970710] env[61898]: DEBUG nova.objects.instance [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid d4189084-f73f-4857-a418-6eb7f5b90d83 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.978827] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.979088] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.989832] env[61898]: INFO 
nova.scheduler.client.report [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Deleted allocations for instance 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b [ 1034.027131] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.154114] env[61898]: DEBUG nova.network.neutron [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [{"id": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "address": "fa:16:3e:24:f1:a9", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9582d2-67", "ovs_interfaceid": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.482504] env[61898]: DEBUG nova.compute.utils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1034.500532] env[61898]: DEBUG oslo_concurrency.lockutils [None req-86072438-ab8d-4c8a-bd2d-c83ef1dc0229 tempest-AttachVolumeTestJSON-1583624250 tempest-AttachVolumeTestJSON-1583624250-project-member] Lock "622326f9-b3c5-452e-b7f6-dfe6de1e7d4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.129s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.564574] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3673bedc-1cd4-4332-885e-9c01158f23cd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.572903] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cb9803-d202-4643-81b3-03df28f1fb1d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.605179] env[61898]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9395dc7-1259-4ea0-87c8-2c8375012d0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.613052] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81c834c-3a4e-4592-b80d-747d115a4d52 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.626201] env[61898]: DEBUG nova.compute.provider_tree [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.656189] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.656529] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Instance network_info: |[{"id": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "address": "fa:16:3e:24:f1:a9", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9582d2-67", "ovs_interfaceid": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1034.656954] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:f1:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '418ddd3d-5f64-407e-8e0c-c8b81639bee9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd9582d2-676b-45d5-be6a-1883dd40c2ff', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1034.664806] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating folder: Project (00a5473d225540e186d6778172a187cb). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.665155] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-973cb554-0f2c-4fec-8393-4e07729f7634 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.675169] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Created folder: Project (00a5473d225540e186d6778172a187cb) in parent group-v267550. [ 1034.675440] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating folder: Instances. Parent ref: group-v267719. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1034.676391] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aebfce1d-cabc-4ba7-96aa-165e04144b2b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.685332] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Created folder: Instances in parent group-v267719. [ 1034.685572] env[61898]: DEBUG oslo.service.loopingcall [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1034.685768] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1034.685969] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbf0d5bb-9fa2-4bc9-ae49-26c105dc775a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.704206] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1034.704206] env[61898]: value = "task-1241219" [ 1034.704206] env[61898]: _type = "Task" [ 1034.704206] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.714638] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241219, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.901783] env[61898]: DEBUG nova.compute.manager [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Received event network-changed-bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1034.902094] env[61898]: DEBUG nova.compute.manager [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Refreshing instance network info cache due to event network-changed-bd9582d2-676b-45d5-be6a-1883dd40c2ff. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1034.902401] env[61898]: DEBUG oslo_concurrency.lockutils [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] Acquiring lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.902612] env[61898]: DEBUG oslo_concurrency.lockutils [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] Acquired lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.902832] env[61898]: DEBUG nova.network.neutron [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Refreshing network info cache for port bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.985056] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.129120] env[61898]: DEBUG nova.scheduler.client.report [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1035.215058] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241219, 'name': CreateVM_Task, 'duration_secs': 0.31543} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.215285] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.215977] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.216170] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.216489] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1035.216760] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f2ee97-4f93-45b8-8dd3-7be9b735dd3b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.221591] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1035.221591] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524fe59d-2ac7-2c79-1dee-8fcaefc3f8f5" [ 1035.221591] env[61898]: _type = "Task" [ 1035.221591] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.229352] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524fe59d-2ac7-2c79-1dee-8fcaefc3f8f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.634305] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.657125] env[61898]: INFO nova.scheduler.client.report [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance d4189084-f73f-4857-a418-6eb7f5b90d83 [ 1035.663503] env[61898]: DEBUG nova.network.neutron [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updated VIF entry in instance network info cache for port bd9582d2-676b-45d5-be6a-1883dd40c2ff. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.663846] env[61898]: DEBUG nova.network.neutron [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [{"id": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "address": "fa:16:3e:24:f1:a9", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9582d2-67", "ovs_interfaceid": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.732545] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524fe59d-2ac7-2c79-1dee-8fcaefc3f8f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009945} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.732870] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.733631] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.733631] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.733631] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.733799] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.733913] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a277f4a7-c870-4840-872a-8a3c7d748b4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.741701] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.741881] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.742587] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f62109d-2006-436b-b052-6420233ac733 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.747310] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1035.747310] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb96c6-f7ad-d532-2955-310eb0d054ed" [ 1035.747310] env[61898]: _type = "Task" [ 1035.747310] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.755586] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb96c6-f7ad-d532-2955-310eb0d054ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.061413] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.064384] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.064757] env[61898]: INFO nova.compute.manager [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Attaching volume 06dc7a5e-d2cb-441b-8708-03e168a9b278 to /dev/sdb [ 1036.103882] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd66fef-6161-4e24-930e-5bd3143c2f3e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.111620] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212c48ff-7b00-4b1b-9076-600ac71a726d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.125545] env[61898]: DEBUG nova.virt.block_device [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating existing volume attachment record: 08cc4c2f-1186-4739-9117-4f87ede6d08d {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1036.171570] env[61898]: 
DEBUG oslo_concurrency.lockutils [req-f84fb39c-0da9-48c7-a36b-7cee1a0fb80a req-bfa463c7-afa8-4831-99fe-75e9286736c4 service nova] Releasing lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.172582] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0b0b92a6-e6b6-4071-85ee-63ae95809089 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "d4189084-f73f-4857-a418-6eb7f5b90d83" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.212s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.258957] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52bb96c6-f7ad-d532-2955-310eb0d054ed, 'name': SearchDatastore_Task, 'duration_secs': 0.007747} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.259982] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaa58cf6-4dd7-4ac0-afc8-28d82844b2d4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.266016] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1036.266016] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524294db-4464-d794-27b7-577681810b86" [ 1036.266016] env[61898]: _type = "Task" [ 1036.266016] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.278913] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524294db-4464-d794-27b7-577681810b86, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.279375] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.279727] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] f8b4a587-9ca8-4710-8cf6-3f6ea336185c/f8b4a587-9ca8-4710-8cf6-3f6ea336185c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.279884] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93d43928-c736-4f5b-9ec6-5e4570e32c6f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.286335] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1036.286335] env[61898]: value = "task-1241222" [ 1036.286335] env[61898]: _type = "Task" [ 1036.286335] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.296692] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.797974] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241222, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470761} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.797974] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] f8b4a587-9ca8-4710-8cf6-3f6ea336185c/f8b4a587-9ca8-4710-8cf6-3f6ea336185c.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.797974] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.797974] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78245842-2d62-404d-8443-940f6e1f5248 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.804021] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1036.804021] env[61898]: value = "task-1241225" [ 1036.804021] env[61898]: _type = "Task" [ 1036.804021] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.809223] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.998554] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "320577e5-f197-4f66-a94f-9b9ba2479325" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.999115] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.999235] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.999551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.999851] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.002496] env[61898]: INFO nova.compute.manager [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Terminating instance [ 1037.312653] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058307} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.312947] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.314144] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815a6c6b-bb50-4c4f-9f99-b342c22f0966 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.336428] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] f8b4a587-9ca8-4710-8cf6-3f6ea336185c/f8b4a587-9ca8-4710-8cf6-3f6ea336185c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.336833] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35c1353a-1f99-4f5a-9ed6-81126b5b794b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.356886] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1037.356886] env[61898]: value = "task-1241226" [ 1037.356886] env[61898]: _type = "Task" [ 1037.356886] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.365747] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241226, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.507788] env[61898]: DEBUG nova.compute.manager [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1037.508058] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1037.509146] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c351acb1-d73b-4b68-98c9-d96c52aa0a07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.517078] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1037.517301] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b6f7a21-dddf-4337-b0fd-5b7874a1a57f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.523062] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1037.523062] env[61898]: value = "task-1241227" [ 1037.523062] env[61898]: _type = "Task" [ 1037.523062] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.531367] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.867224] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241226, 'name': ReconfigVM_Task, 'duration_secs': 0.277828} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.867557] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] f8b4a587-9ca8-4710-8cf6-3f6ea336185c/f8b4a587-9ca8-4710-8cf6-3f6ea336185c.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.868253] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17a6ed37-34d0-44bd-9948-14e8cb578c0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.874728] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1037.874728] env[61898]: value = "task-1241228" [ 1037.874728] env[61898]: _type = "Task" [ 1037.874728] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.882875] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241228, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.926163] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.926263] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.033631] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.385011] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241228, 'name': Rename_Task, 'duration_secs': 0.140874} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.387332] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.387621] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-876a759d-c49d-413c-a0ae-bb3d4f8a5113 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.393685] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1038.393685] env[61898]: value = "task-1241230" [ 1038.393685] env[61898]: _type = "Task" [ 1038.393685] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.409907] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241230, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.428485] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1038.534174] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241227, 'name': PowerOffVM_Task, 'duration_secs': 0.660463} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.534174] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.534174] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1038.534174] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3db89dd5-05c8-46e6-8f95-102ad04ce8b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.604268] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1038.604268] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1038.604268] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleting the datastore file [datastore2] 320577e5-f197-4f66-a94f-9b9ba2479325 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.604268] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19689281-55cd-4868-abc9-191054819599 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.611664] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for the task: (returnval){ [ 1038.611664] env[61898]: value = "task-1241233" [ 1038.611664] env[61898]: _type = "Task" [ 1038.611664] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.619575] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241233, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.904810] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241230, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.953816] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.954182] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.955764] env[61898]: INFO nova.compute.claims [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.123082] env[61898]: DEBUG oslo_vmware.api [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Task: {'id': task-1241233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132479} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.123381] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1039.123585] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1039.123771] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1039.123949] env[61898]: INFO nova.compute.manager [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1039.124215] env[61898]: DEBUG oslo.service.loopingcall [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1039.124789] env[61898]: DEBUG nova.compute.manager [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1039.124894] env[61898]: DEBUG nova.network.neutron [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1039.348099] env[61898]: DEBUG nova.compute.manager [req-cbb62bc2-6949-48db-af08-746f88fa24dd req-36cb0875-9927-47c8-b8ab-0804c896b4eb service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Received event network-vif-deleted-92b514d1-73d5-449b-8f17-dd283c2d7014 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1039.348406] env[61898]: INFO nova.compute.manager [req-cbb62bc2-6949-48db-af08-746f88fa24dd req-36cb0875-9927-47c8-b8ab-0804c896b4eb service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Neutron deleted interface 92b514d1-73d5-449b-8f17-dd283c2d7014; detaching it from the instance and deleting it from the info cache [ 1039.348606] env[61898]: DEBUG nova.network.neutron [req-cbb62bc2-6949-48db-af08-746f88fa24dd req-36cb0875-9927-47c8-b8ab-0804c896b4eb service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.405517] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241230, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.826129] env[61898]: DEBUG nova.network.neutron [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.852946] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4f63654-f476-49d4-b309-be8018528e2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.863162] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f581d5b-744d-4dd5-aa9f-2064f0167c51 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.888878] env[61898]: DEBUG nova.compute.manager [req-cbb62bc2-6949-48db-af08-746f88fa24dd req-36cb0875-9927-47c8-b8ab-0804c896b4eb service nova] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Detach interface failed, port_id=92b514d1-73d5-449b-8f17-dd283c2d7014, reason: Instance 320577e5-f197-4f66-a94f-9b9ba2479325 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1039.904699] env[61898]: DEBUG oslo_vmware.api [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241230, 'name': PowerOnVM_Task, 'duration_secs': 1.07757} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.905026] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.905298] env[61898]: INFO nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1039.905521] env[61898]: DEBUG nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1039.906318] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8adbf9-cc13-4a53-b54c-1b9edbb7d6cc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.037510] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fe0384-7780-435e-bf9d-af55d8f150bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.045065] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4b159b-8a44-4ba5-978e-d149a23590f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.075110] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0702c3fa-bd6a-4b7c-a9ce-04fa506b1c64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.082609] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4397aaa6-3368-4b41-ae08-9f3c810f79c9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.096585] env[61898]: DEBUG nova.compute.provider_tree [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.329311] env[61898]: INFO nova.compute.manager [-] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Took 1.20 seconds to deallocate network for instance. [ 1040.425322] env[61898]: INFO nova.compute.manager [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Took 12.06 seconds to build instance. 
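Aside for readers tracing these entries: the recurring "Waiting for the task: (returnval){...}" / "progress is N%" / "completed successfully" triplets (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all record one poll-until-done loop against a vCenter task reference. The sketch below is only a generic illustration of that pattern; get_task_info() and the TaskInfo shape are hypothetical stand-ins, not the oslo.vmware API these log lines come from.

```python
# Minimal sketch of the poll-until-done pattern recorded by the
# wait_for_task / _poll_task lines above. get_task_info() and TaskInfo
# are hypothetical stand-ins, not oslo.vmware calls.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str              # "queued" | "running" | "success" | "error"
    progress: int           # 0-100, as logged ("progress is 66%")
    error: str | None = None


def get_task_info(task_ref: str) -> TaskInfo:
    """Hypothetical lookup of the current server-side task state."""
    raise NotImplementedError


def wait_for_task(task_ref: str, interval: float = 0.5, timeout: float = 300.0) -> TaskInfo:
    """Poll a task reference until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info                      # e.g. "CreateVM_Task ... completed successfully"
        if info.state == "error":
            raise RuntimeError(f"task {task_ref} failed: {info.error}")
        # corresponds to the periodic "progress is N%" entries in the log
        print(f"Task {task_ref} progress is {info.progress}%.")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")
```

In the log, the same loop simply runs inside the service, and the finished task carries the duration_secs value that appears once a task completes.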
[ 1040.599530] env[61898]: DEBUG nova.scheduler.client.report [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1040.676130] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Volume attach. Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1040.676467] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267723', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'name': 'volume-06dc7a5e-d2cb-441b-8708-03e168a9b278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0184b78-1525-44a4-a515-3eeb34a59cde', 'attached_at': '', 'detached_at': '', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'serial': '06dc7a5e-d2cb-441b-8708-03e168a9b278'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1040.677389] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e6c006-9af8-4106-92ac-825290ef7421 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.694036] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f54e65-8837-4f49-9d5c-6ba3d21feaa0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.721602] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] volume-06dc7a5e-d2cb-441b-8708-03e168a9b278/volume-06dc7a5e-d2cb-441b-8708-03e168a9b278.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.721933] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-389a8fe6-034d-4810-9028-9ce292d0e213 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.740836] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for 
the task: (returnval){ [ 1040.740836] env[61898]: value = "task-1241234" [ 1040.740836] env[61898]: _type = "Task" [ 1040.740836] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.747891] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.835597] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.928255] env[61898]: DEBUG oslo_concurrency.lockutils [None req-58dbdad1-5e79-4596-9105-e51a4589e312 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.572s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.106058] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.151s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.106346] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1041.108933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.274s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.109185] env[61898]: DEBUG nova.objects.instance [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lazy-loading 'resources' on Instance uuid 320577e5-f197-4f66-a94f-9b9ba2479325 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.251299] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241234, 'name': ReconfigVM_Task, 'duration_secs': 0.345155} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.251670] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to attach disk [datastore2] volume-06dc7a5e-d2cb-441b-8708-03e168a9b278/volume-06dc7a5e-d2cb-441b-8708-03e168a9b278.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.256331] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e90cf329-cc34-417f-a1b8-2ed9b1724d7d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.270299] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1041.270299] env[61898]: value = "task-1241236" [ 1041.270299] env[61898]: _type = "Task" [ 1041.270299] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.281537] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241236, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.373718] env[61898]: DEBUG nova.compute.manager [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Received event network-changed-bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1041.373954] env[61898]: DEBUG nova.compute.manager [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Refreshing instance network info cache due to event network-changed-bd9582d2-676b-45d5-be6a-1883dd40c2ff. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1041.374241] env[61898]: DEBUG oslo_concurrency.lockutils [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] Acquiring lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1041.374435] env[61898]: DEBUG oslo_concurrency.lockutils [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] Acquired lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.374648] env[61898]: DEBUG nova.network.neutron [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Refreshing network info cache for port bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.611840] env[61898]: DEBUG nova.compute.utils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1041.614485] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1041.614485] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.675144] env[61898]: DEBUG nova.policy [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dac287e0f5924b26922c5d1bba242248', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3e0f463034b4a398c2b191311f629b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1041.693026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de37f987-3e51-48e0-a54b-9084d48bcaf1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.700291] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc705dba-d7b6-443b-a5f1-da88c3e104f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.731442] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-73a6a4fd-fcff-4236-8939-73763d6f2994 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.738558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a67f94-ce0f-47c4-b8f6-0298622f202e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.751414] env[61898]: DEBUG nova.compute.provider_tree [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.779371] env[61898]: DEBUG oslo_vmware.api [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241236, 'name': ReconfigVM_Task, 'duration_secs': 0.143326} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.779487] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267723', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'name': 'volume-06dc7a5e-d2cb-441b-8708-03e168a9b278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd0184b78-1525-44a4-a515-3eeb34a59cde', 'attached_at': '', 'detached_at': '', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'serial': '06dc7a5e-d2cb-441b-8708-03e168a9b278'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1041.919584] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Successfully created port: e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.120212] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1042.160304] env[61898]: DEBUG nova.network.neutron [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updated VIF entry in instance network info cache for port bd9582d2-676b-45d5-be6a-1883dd40c2ff. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.160676] env[61898]: DEBUG nova.network.neutron [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [{"id": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "address": "fa:16:3e:24:f1:a9", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9582d2-67", "ovs_interfaceid": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.254971] env[61898]: DEBUG nova.scheduler.client.report [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1042.663413] env[61898]: DEBUG oslo_concurrency.lockutils [req-095ce238-a3ff-4826-ac1a-4fa94948981e req-a6b63de7-e3aa-4555-8eba-e5127cb3663a service nova] Releasing lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1042.760290] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.651s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.784479] env[61898]: INFO nova.scheduler.client.report [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Deleted allocations for instance 320577e5-f197-4f66-a94f-9b9ba2479325 [ 1042.816322] env[61898]: DEBUG nova.objects.instance [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'flavor' on Instance uuid d0184b78-1525-44a4-a515-3eeb34a59cde {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.132963] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1043.158690] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.158955] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.159152] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.159349] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.159512] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.159656] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.159869] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.160046] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.160222] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.160392] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.160569] env[61898]: DEBUG nova.virt.hardware [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.161495] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfd7944-21b4-4302-841d-3be2b8bca671 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.169911] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c29448-e106-45d7-a920-e587aca397a9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.294493] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5313a316-769c-464b-a2ce-136ffd22f1c5 tempest-ServersTestJSON-1212200691 tempest-ServersTestJSON-1212200691-project-member] Lock "320577e5-f197-4f66-a94f-9b9ba2479325" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.295s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.322634] env[61898]: DEBUG oslo_concurrency.lockutils [None req-cdda42ca-cf03-421e-a94d-b2e17ba3fb27 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.261s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.406688] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Successfully updated port: e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.412243] env[61898]: DEBUG nova.compute.manager [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] [instance: 
564f08f5-17b2-477d-b1d5-7cd46436fad1] Received event network-vif-plugged-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1043.412520] env[61898]: DEBUG oslo_concurrency.lockutils [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] Acquiring lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.412742] env[61898]: DEBUG oslo_concurrency.lockutils [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.413138] env[61898]: DEBUG oslo_concurrency.lockutils [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.413602] env[61898]: DEBUG nova.compute.manager [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] No waiting events found dispatching network-vif-plugged-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1043.413830] env[61898]: WARNING nova.compute.manager [req-808dbc43-47f4-42a1-9470-0b3db71753a8 req-645092b5-66c5-4c13-9dd7-33adf8372743 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Received unexpected event network-vif-plugged-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c for instance with vm_state building and task_state spawning. 
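The entries above record nova's external-event plumbing: the network-vif-plugged event from Neutron is popped under the per-instance "<uuid>-events" lock, and since nothing had registered a waiter yet the compute manager logs the "No waiting events found dispatching ..." / "Received unexpected event ..." pair. Below is a minimal sketch of that acquire/pop/dispatch pattern, not nova's actual InstanceEvents implementation; the _waiters dictionary and the function names are hypothetical, and only oslo_concurrency.lockutils.lock and threading.Event are real APIs.

import threading

from oslo_concurrency import lockutils

# Hypothetical registry of waiters keyed by (instance_uuid, event_name);
# nova's real InstanceEvents object is richer than this.
_waiters = {}


def prepare_for_event(instance_uuid, event_name):
    """Register a waiter before starting the operation that triggers the event."""
    with lockutils.lock('%s-events' % instance_uuid):
        waiter = threading.Event()
        _waiters[(instance_uuid, event_name)] = waiter
        return waiter


def pop_instance_event(instance_uuid, event_name):
    """Pop a registered waiter under the per-instance "<uuid>-events" lock."""
    with lockutils.lock('%s-events' % instance_uuid):
        return _waiters.pop((instance_uuid, event_name), None)


def handle_external_event(instance_uuid, event_name):
    """Dispatch an event received from Neutron, as in the log records above."""
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the "No waiting events found dispatching ..." debug
        # line followed by the "Received unexpected event ..." warning.
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        waiter.set()  # unblock the thread waiting for the VIF to be plugged

In this sketch a spawning thread would call prepare_for_event() before plugging the VIF and then wait() on the returned event; here the port became active before any waiter existed, which is why the event is reported as unexpected while the instance is still in vm_state building / task_state spawning.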
[ 1043.910716] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1043.911138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquired lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.911138] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.156226] env[61898]: DEBUG nova.compute.manager [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Stashing vm_state: active {{(pid=61898) _prep_resize /opt/stack/nova/nova/compute/manager.py:5953}} [ 1044.466451] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.637777] env[61898]: DEBUG nova.network.neutron [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updating instance_info_cache with network_info: [{"id": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "address": "fa:16:3e:d6:97:d9", "network": {"id": "78918013-0694-4cb9-8d8a-f99e28370ea7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1736612263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3e0f463034b4a398c2b191311f629b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape25fbd8f-78", "ovs_interfaceid": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.678012] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.682495] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.004s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.143022] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Releasing lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.143022] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Instance network_info: |[{"id": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "address": "fa:16:3e:d6:97:d9", "network": {"id": "78918013-0694-4cb9-8d8a-f99e28370ea7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1736612263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3e0f463034b4a398c2b191311f629b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape25fbd8f-78", "ovs_interfaceid": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1045.143022] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:97:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.149844] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Creating folder: Project 
(b3e0f463034b4a398c2b191311f629b9). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1045.150281] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b55d7d5-6e4a-4e23-9441-63fe0e59f3ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.161321] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Created folder: Project (b3e0f463034b4a398c2b191311f629b9) in parent group-v267550. [ 1045.163073] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Creating folder: Instances. Parent ref: group-v267724. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1045.163073] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39728ad6-00a7-47a6-ad3c-2694ab6780a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.170843] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Created folder: Instances in parent group-v267724. [ 1045.170843] env[61898]: DEBUG oslo.service.loopingcall [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.170843] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1045.171332] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0bc6f34-a5cb-42ef-9af5-36087945dab0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.188762] env[61898]: INFO nova.compute.claims [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.196825] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.196825] env[61898]: value = "task-1241239" [ 1045.196825] env[61898]: _type = "Task" [ 1045.196825] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.207062] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241239, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.443847] env[61898]: DEBUG nova.compute.manager [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Received event network-changed-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1045.443847] env[61898]: DEBUG nova.compute.manager [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Refreshing instance network info cache due to event network-changed-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1045.443847] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] Acquiring lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.443847] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] Acquired lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.443847] env[61898]: DEBUG nova.network.neutron [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Refreshing network info cache for port e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.695566] env[61898]: INFO nova.compute.resource_tracker [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating resource usage from migration df03973c-d1e5-4406-8a29-8b137fc9727b [ 1045.706959] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241239, 'name': CreateVM_Task, 'duration_secs': 0.325645} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.707151] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.707806] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.707978] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.708320] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1045.708603] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1bdf0b9-a3c0-419e-b7f1-8bae356b2724 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.713090] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1045.713090] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfebc6-32d2-2955-080a-ae14474e610f" [ 1045.713090] env[61898]: _type = "Task" [ 1045.713090] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.723544] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfebc6-32d2-2955-080a-ae14474e610f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.768572] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7a861f-8b7d-4e32-b0c9-a38de45664e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.775529] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecccd933-6f11-45ea-b808-ddd58d79da2d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.804516] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc805d55-1d29-4803-860a-08cf4fcf6abe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.812288] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45891ac-c12b-4851-8146-66f3b457400d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.825399] env[61898]: DEBUG nova.compute.provider_tree [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.939567] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.939922] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.940135] env[61898]: INFO nova.compute.manager [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Shelving [ 1046.179235] env[61898]: DEBUG nova.network.neutron [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updated VIF entry in instance network info cache for port e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.179619] env[61898]: DEBUG nova.network.neutron [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updating instance_info_cache with network_info: [{"id": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "address": "fa:16:3e:d6:97:d9", "network": {"id": "78918013-0694-4cb9-8d8a-f99e28370ea7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1736612263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3e0f463034b4a398c2b191311f629b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape25fbd8f-78", "ovs_interfaceid": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.226181] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52cfebc6-32d2-2955-080a-ae14474e610f, 'name': SearchDatastore_Task, 'duration_secs': 0.010365} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.226600] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.226920] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.227228] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.227401] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.227620] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.227937] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-555a1c31-0f33-4b6d-b563-e326e5d441eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.236800] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.237154] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.237860] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3af8edd0-fa19-4fd0-a837-98ca21df5f61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.243424] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1046.243424] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e9f909-e8d6-37d8-1780-79ec4c591aef" [ 1046.243424] env[61898]: _type = "Task" [ 1046.243424] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.251809] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e9f909-e8d6-37d8-1780-79ec4c591aef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.329974] env[61898]: DEBUG nova.scheduler.client.report [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1046.683196] env[61898]: DEBUG oslo_concurrency.lockutils [req-c6edf656-fdfe-4648-9a44-6230cfe18759 req-7d196b7d-150d-4d8a-b334-14a6b8b5bd34 service nova] Releasing lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.756114] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e9f909-e8d6-37d8-1780-79ec4c591aef, 'name': SearchDatastore_Task, 'duration_secs': 0.009485} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.757241] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d86ee8-f400-4869-9e04-2dcacf52d992 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.762679] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1046.762679] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52a613fd-cca5-6276-3554-bc9f982c0c80" [ 1046.762679] env[61898]: _type = "Task" [ 1046.762679] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.770717] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a613fd-cca5-6276-3554-bc9f982c0c80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.837949] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.157s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.838207] env[61898]: INFO nova.compute.manager [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Migrating [ 1046.952063] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1046.952443] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-266ee9a5-f560-41eb-bc4d-e6fed8b613a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.960383] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1046.960383] env[61898]: value = "task-1241240" [ 1046.960383] env[61898]: _type = "Task" [ 1046.960383] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.969995] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241240, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.273393] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52a613fd-cca5-6276-3554-bc9f982c0c80, 'name': SearchDatastore_Task, 'duration_secs': 0.015874} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.273758] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.274256] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 564f08f5-17b2-477d-b1d5-7cd46436fad1/564f08f5-17b2-477d-b1d5-7cd46436fad1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.274529] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6670cf20-6d3c-4ffb-a0c2-e5312a94cd00 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.281417] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1047.281417] env[61898]: value = "task-1241241" [ 1047.281417] env[61898]: _type = "Task" [ 1047.281417] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.289231] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241241, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.358679] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.358913] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.359114] env[61898]: DEBUG nova.network.neutron [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.473391] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241240, 'name': PowerOffVM_Task, 'duration_secs': 0.267957} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.473735] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.474646] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f201c1b-c7b1-4365-9cc1-674dfc57486a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.495817] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acea573d-51e9-4a1b-a238-d116e8558a6d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.790952] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443842} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.791245] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 564f08f5-17b2-477d-b1d5-7cd46436fad1/564f08f5-17b2-477d-b1d5-7cd46436fad1.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.791464] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.791713] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c61fa47-7e4c-40b1-b792-63d749b2de84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.797653] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1047.797653] env[61898]: value = "task-1241242" [ 1047.797653] env[61898]: _type = "Task" [ 1047.797653] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.804748] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241242, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.008531] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1048.008961] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-34ebf02a-a2a1-4fec-a182-ed27b5eb34d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.017632] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1048.017632] env[61898]: value = "task-1241243" [ 1048.017632] env[61898]: _type = "Task" [ 1048.017632] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.027672] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241243, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.192836] env[61898]: DEBUG nova.network.neutron [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.307470] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241242, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062668} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.307817] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.308531] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76c1fd7-1b17-479e-9ace-b826fe9e4caf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.330057] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 564f08f5-17b2-477d-b1d5-7cd46436fad1/564f08f5-17b2-477d-b1d5-7cd46436fad1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.330244] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3db6aa6-8009-4033-8949-6420d6d1663b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.349571] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1048.349571] env[61898]: value = "task-1241244" [ 1048.349571] env[61898]: _type = "Task" [ 1048.349571] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.356898] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241244, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.526550] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241243, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.695635] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.859627] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241244, 'name': ReconfigVM_Task, 'duration_secs': 0.305643} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.859877] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Reconfigured VM instance instance-0000006b to attach disk [datastore2] 564f08f5-17b2-477d-b1d5-7cd46436fad1/564f08f5-17b2-477d-b1d5-7cd46436fad1.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.860559] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a0cbf53-4839-4feb-9b9e-a9837888ec1c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.867398] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1048.867398] env[61898]: value = "task-1241245" [ 1048.867398] env[61898]: _type = "Task" [ 1048.867398] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.876106] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241245, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.027217] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241243, 'name': CreateSnapshot_Task, 'duration_secs': 0.904691} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.027493] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1049.028210] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92da8869-58d4-41da-b4cd-8ec15590dee1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.376772] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241245, 'name': Rename_Task, 'duration_secs': 0.14681} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.377105] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.377243] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14d233a2-9bf8-43fd-9707-d1a2bf591c14 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.382648] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1049.382648] env[61898]: value = "task-1241246" [ 1049.382648] env[61898]: _type = "Task" [ 1049.382648] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.389410] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241246, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.546959] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1049.547409] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6195bd23-61a6-4509-af5b-72f64b2f55a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.556333] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1049.556333] env[61898]: value = "task-1241247" [ 1049.556333] env[61898]: _type = "Task" [ 1049.556333] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.564630] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241247, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.893179] env[61898]: DEBUG oslo_vmware.api [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241246, 'name': PowerOnVM_Task, 'duration_secs': 0.46166} completed successfully. 
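The spawn of instance 564f08f5 finishes here with PowerOnVM_Task. Nova drives each of these calls through oslo.vmware, which submits the *_Task method and then polls it until it reaches a terminal state (the "progress is N%" lines above). As an illustration only, the same wait loop written directly against pyVmomi task objects could look like the sketch below; 'vm' is assumed to be a vim.VirtualMachine obtained elsewhere.

import time
from pyVmomi import vim

def wait_for_task(task, poll_interval=0.5):
    """Poll a vSphere task until it leaves queued/running; return its result or raise."""
    while task.info.state in (vim.TaskInfo.State.queued, vim.TaskInfo.State.running):
        time.sleep(poll_interval)
    if task.info.state == vim.TaskInfo.State.success:
        return task.info.result
    raise task.info.error  # the vim.fault.* carried by a failed task

# usage: wait_for_task(vm.PowerOnVM_Task())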
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.893449] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.893655] env[61898]: INFO nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Took 6.76 seconds to spawn the instance on the hypervisor. [ 1049.893837] env[61898]: DEBUG nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1049.894703] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd20f241-9a8c-4bf5-b14b-183050533003 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.068502] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241247, 'name': CloneVM_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.213696] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edfc846-df85-4b04-b83d-b0751bcbba07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.240169] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 0 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1050.411302] env[61898]: INFO nova.compute.manager [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Took 11.48 seconds to build instance. [ 1050.567485] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241247, 'name': CloneVM_Task} progress is 95%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.748345] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.748682] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5929999-0d76-46b5-8f82-b78470576f72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.755386] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1050.755386] env[61898]: value = "task-1241248" [ 1050.755386] env[61898]: _type = "Task" [ 1050.755386] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.763628] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241248, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.913671] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0a03874e-101a-4a84-8c5e-cdfb1861aa88 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.987s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.066683] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241247, 'name': CloneVM_Task, 'duration_secs': 1.141049} completed successfully. 
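The CreateSnapshot_Task / CloneVM_Task pair above is the linked-clone step of the snapshot path: the driver snapshots the powered-off VM, then clones from that snapshot so the clone's disks are thin children of the original. A pyVmomi analogue of that sequence is sketched below; it is not Nova's code, and 'vm', 'dest_folder' and the names are assumptions.

from pyVim.task import WaitForTask
from pyVmomi import vim

def linked_clone(vm, dest_folder, clone_name):
    # Snapshot the (already powered-off) VM: no memory dump, no quiescing.
    WaitForTask(vm.CreateSnapshot_Task(name="tmp-snap", description="",
                                       memory=False, quiesce=False))
    snapshot = vm.snapshot.currentSnapshot

    # Clone from the snapshot, backing the clone's disks as child (delta) disks.
    relocate = vim.vm.RelocateSpec(diskMoveType="createNewChildDiskBacking")
    spec = vim.vm.CloneSpec(snapshot=snapshot, location=relocate, powerOn=False)
    WaitForTask(vm.CloneVM_Task(folder=dest_folder, name=clone_name, spec=spec))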
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.067849] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Created linked-clone VM from snapshot [ 1051.068203] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51087135-61c4-4a9c-8267-152c7c06dbda {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.075075] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Uploading image e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1051.100033] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1051.100033] env[61898]: value = "vm-267728" [ 1051.100033] env[61898]: _type = "VirtualMachine" [ 1051.100033] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1051.100303] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bcccf115-9862-45d0-aaa3-a03b1259df44 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.106024] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease: (returnval){ [ 1051.106024] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5269fb90-5ad1-2194-457c-8ef7b355ed10" [ 1051.106024] env[61898]: _type = "HttpNfcLease" [ 1051.106024] env[61898]: } obtained for exporting VM: (result){ [ 1051.106024] env[61898]: value = "vm-267728" [ 1051.106024] env[61898]: _type = "VirtualMachine" [ 1051.106024] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1051.106264] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the lease: (returnval){ [ 1051.106264] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5269fb90-5ad1-2194-457c-8ef7b355ed10" [ 1051.106264] env[61898]: _type = "HttpNfcLease" [ 1051.106264] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1051.111713] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1051.111713] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5269fb90-5ad1-2194-457c-8ef7b355ed10" [ 1051.111713] env[61898]: _type = "HttpNfcLease" [ 1051.111713] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1051.264936] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241248, 'name': PowerOffVM_Task, 'duration_secs': 0.214106} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.265221] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.265409] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 17 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.300518] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.300779] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.301074] env[61898]: INFO nova.compute.manager [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Rebooting instance [ 1051.614478] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1051.614478] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5269fb90-5ad1-2194-457c-8ef7b355ed10" [ 1051.614478] env[61898]: _type = "HttpNfcLease" [ 1051.614478] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1051.614478] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1051.614478] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5269fb90-5ad1-2194-457c-8ef7b355ed10" [ 1051.614478] env[61898]: _type = "HttpNfcLease" [ 1051.614478] env[61898]: }. 
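Uploading the clone to the image store goes through an HttpNfcLease: ExportVm returns a lease, the driver waits for it to leave the "initializing" state, then reads the VMDK URL from the lease info (the esx7c1n2 URL that appears next). A pyVmomi sketch of that handshake, for illustration only ('vm' is assumed):

import time
from pyVmomi import vim

def export_disk_url(vm, timeout=60):
    """Open an export lease on the VM and return (lease, URL of the first disk)."""
    lease = vm.ExportVm()
    deadline = time.monotonic() + timeout
    while lease.state == vim.HttpNfcLease.State.initializing:
        if time.monotonic() > deadline:
            raise TimeoutError("export lease never became ready")
        time.sleep(1)
    if lease.state != vim.HttpNfcLease.State.ready:
        raise RuntimeError(f"export lease entered state {lease.state}")
    return lease, lease.info.deviceUrl[0].url

# While streaming the disk, keep the lease alive with lease.HttpNfcLeaseProgress(pct)
# (the HttpNfcLeaseProgress call above) and close it with lease.HttpNfcLeaseComplete().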
{{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1051.615122] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a80279-54a6-4bca-bb27-b59a3c1b5872 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.621777] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1051.621950] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk for reading. {{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1051.708133] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9ef99345-81f6-477e-b25d-f4c4388c285e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.771645] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:53Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1051.771901] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1051.772125] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1051.772338] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1051.772486] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1051.772636] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1051.772835] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1051.772997] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1051.773176] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1051.773343] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1051.773514] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1051.778407] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d12a3413-fac5-4f1d-b489-abe87b79e168 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.794562] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1051.794562] env[61898]: value = "task-1241250" [ 1051.794562] env[61898]: _type = "Task" [ 1051.794562] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.802634] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241250, 'name': ReconfigVM_Task} progress is 5%. 
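The nova.virt.hardware lines above enumerate which (sockets, cores, threads) layouts can express the flavor's single vCPU under the default 65536/65536/65536 maxima, which is why exactly one topology, 1:1:1, comes back. A much-simplified sketch of that enumeration follows; Nova's real code additionally weighs flavor/image preferences and NUMA constraints.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) triple whose product is exactly vcpus."""
    return [
        (sockets, cores, threads)
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3)
        if sockets * cores * threads == vcpus
        and sockets <= max_sockets and cores <= max_cores and threads <= max_threads
    ]

print(possible_topologies(1))        # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(len(possible_topologies(4)))   # several layouts once there is more than one vCPU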
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.828120] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.828345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquired lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.828549] env[61898]: DEBUG nova.network.neutron [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.304997] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241250, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.596284] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.596540] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.596718] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1052.606875] env[61898]: DEBUG nova.network.neutron [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updating instance_info_cache with network_info: [{"id": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "address": "fa:16:3e:d6:97:d9", "network": {"id": "78918013-0694-4cb9-8d8a-f99e28370ea7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1736612263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3e0f463034b4a398c2b191311f629b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape25fbd8f-78", "ovs_interfaceid": "e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.805810] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241250, 'name': ReconfigVM_Task} progress is 99%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.110423] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Releasing lock "refresh_cache-564f08f5-17b2-477d-b1d5-7cd46436fad1" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.112701] env[61898]: DEBUG nova.compute.manager [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1053.113584] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f79e59-29b5-4367-be2e-332bba0f3edc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.305994] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241250, 'name': ReconfigVM_Task, 'duration_secs': 1.175393} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.306447] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 33 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1053.812904] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1053.813326] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1053.813435] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.813542] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1053.813688] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.813838] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1053.814134] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1053.814347] env[61898]: DEBUG nova.virt.hardware [None 
req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1053.814526] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1053.814697] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1053.814873] env[61898]: DEBUG nova.virt.hardware [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1053.820245] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.820690] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb7bc8e9-1487-4266-88b0-b1bd9e92ec64 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.841440] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1053.841440] env[61898]: value = "task-1241251" [ 1053.841440] env[61898]: _type = "Task" [ 1053.841440] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.851201] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241251, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.129731] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c2a116-6db2-4adf-932c-394c7cc560c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.137696] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Doing hard reboot of VM {{(pid=61898) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1054.138013] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-018b81ac-5165-4e2f-aa00-39aacb227469 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.144784] env[61898]: DEBUG oslo_vmware.api [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1054.144784] env[61898]: value = "task-1241252" [ 1054.144784] env[61898]: _type = "Task" [ 1054.144784] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.152595] env[61898]: DEBUG oslo_vmware.api [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241252, 'name': ResetVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.351418] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241251, 'name': ReconfigVM_Task, 'duration_secs': 0.188419} completed successfully. 
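Resizing d0184b78 swaps its root disk: the ReconfigVM_Task that just completed removes the existing virtual disk (device key 2000) from the VM without touching its backing file, and the next one attaches the resized copy. A pyVmomi sketch of the detach half; 'vm' is an assumption and the key 2000 is taken from the log:

from pyVim.task import WaitForTask
from pyVmomi import vim

def detach_disk(vm, disk_key=2000):
    """Remove a virtual disk device from the VM, leaving its VMDK file in place."""
    disk = next(dev for dev in vm.config.hardware.device
                if isinstance(dev, vim.vm.device.VirtualDisk) and dev.key == disk_key)
    change = vim.vm.device.VirtualDeviceSpec(
        operation=vim.vm.device.VirtualDeviceSpec.Operation.remove,
        device=disk)  # no fileOperation, so the backing .vmdk is kept
    WaitForTask(vm.ReconfigVM_Task(spec=vim.vm.ConfigSpec(deviceChange=[change])))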
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.351718] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1054.352562] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf81668b-3e5a-454f-a086-9c67beedbdc4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.376831] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.377241] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98883391-32fb-450e-8fda-386d5cf39145 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.395796] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1054.395796] env[61898]: value = "task-1241253" [ 1054.395796] env[61898]: _type = "Task" [ 1054.395796] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.406354] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241253, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.656347] env[61898]: DEBUG oslo_vmware.api [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241252, 'name': ResetVM_Task, 'duration_secs': 0.102383} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.656698] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Did hard reboot of VM {{(pid=61898) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1054.656968] env[61898]: DEBUG nova.compute.manager [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1054.657804] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f54a951-14ae-4dd0-973f-66b4bac17c98 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.906333] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241253, 'name': ReconfigVM_Task, 'duration_secs': 0.299606} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.906698] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.906902] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 50 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.169789] env[61898]: DEBUG oslo_concurrency.lockutils [None req-22c3a84e-6575-41ff-819f-36ad9b5cb600 tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.869s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.413967] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4550c6a9-6588-4329-bc7a-71d380ee7627 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.435742] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e36c86-d823-480e-ba0a-c23071418932 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.456198] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 67 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.970248] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.970569] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.970820] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.971044] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.971244] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1055.973460] env[61898]: INFO nova.compute.manager [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Terminating instance [ 1056.478269] env[61898]: DEBUG nova.compute.manager [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1056.478692] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.480244] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53207d5-4e0c-424d-976f-4cbd82944306 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.490764] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.491032] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f361ed73-827c-40c1-bc79-63c963207084 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.498242] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1056.498242] env[61898]: value = "task-1241254" [ 1056.498242] env[61898]: _type = "Task" [ 1056.498242] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.505901] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.009098] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241254, 'name': PowerOffVM_Task, 'duration_secs': 0.197515} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.009464] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.009594] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.009847] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d06c77b2-ed45-4a4d-a7c5-7ee8aa7263f2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.070259] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.070479] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.070631] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Deleting the datastore file [datastore2] 564f08f5-17b2-477d-b1d5-7cd46436fad1 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.070901] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b0bf2b3-da14-407f-969b-fd1ee84b5fa0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.079650] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for the task: (returnval){ [ 1057.079650] env[61898]: value = "task-1241256" [ 1057.079650] env[61898]: _type = "Task" [ 1057.079650] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.087621] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241256, 'name': DeleteDatastoreFile_Task} progress is 0%. 
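Terminating 564f08f5 follows the usual vmwareapi destroy order: power the VM off, unregister it (which removes it from inventory but not from disk), then delete its datastore directory with FileManager.DeleteDatastoreFile_Task. A pyVmomi analogue, for illustration only ('si' is a connected ServiceInstance; 'vm' and 'datacenter' are assumptions):

from pyVim.task import WaitForTask
from pyVmomi import vim

def destroy_vm_files(si, vm, datacenter):
    if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
        WaitForTask(vm.PowerOffVM_Task())
    # "[datastore2] <uuid>/<uuid>.vmx" -> "[datastore2] <uuid>"
    vm_dir = vm.config.files.vmPathName.rsplit("/", 1)[0]
    vm.UnregisterVM()  # plain call, not a task
    WaitForTask(si.content.fileManager.DeleteDatastoreFile_Task(
        name=vm_dir, datacenter=datacenter))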
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.113898] env[61898]: DEBUG nova.network.neutron [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Port 630c2b2d-b17e-470f-ad5f-506c4734d40c binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1057.590726] env[61898]: DEBUG oslo_vmware.api [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Task: {'id': task-1241256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209924} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.591088] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.591340] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.591566] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.591804] env[61898]: INFO nova.compute.manager [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1057.592113] env[61898]: DEBUG oslo.service.loopingcall [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1057.592363] env[61898]: DEBUG nova.compute.manager [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1057.592490] env[61898]: DEBUG nova.network.neutron [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.876771] env[61898]: DEBUG nova.compute.manager [req-ac2e7097-8e8a-46ab-ab62-128b4550c73c req-8c4f7d51-eb4f-47fc-8653-771fef8d41d4 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Received event network-vif-deleted-e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1057.876989] env[61898]: INFO nova.compute.manager [req-ac2e7097-8e8a-46ab-ab62-128b4550c73c req-8c4f7d51-eb4f-47fc-8653-771fef8d41d4 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Neutron deleted interface e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c; detaching it from the instance and deleting it from the info cache [ 1057.877211] env[61898]: DEBUG nova.network.neutron [req-ac2e7097-8e8a-46ab-ab62-128b4550c73c req-8c4f7d51-eb4f-47fc-8653-771fef8d41d4 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.140214] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.140555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.140656] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.354088] env[61898]: DEBUG nova.network.neutron [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.380833] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84dea57c-89bf-4b88-96b2-13b5890d1001 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.392155] env[61898]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345bbd8b-e0e2-4220-adf6-e10748bb02c3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.417779] env[61898]: DEBUG nova.compute.manager [req-ac2e7097-8e8a-46ab-ab62-128b4550c73c req-8c4f7d51-eb4f-47fc-8653-771fef8d41d4 service nova] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Detach interface failed, port_id=e25fbd8f-78eb-4a5a-9f5c-50e19ca1c09c, reason: Instance 564f08f5-17b2-477d-b1d5-7cd46436fad1 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1058.640540] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Didn't find any instances for network info cache update. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1058.640772] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.641074] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.641297] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.641519] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.641708] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.641946] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.642114] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1058.644053] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.856801] env[61898]: INFO nova.compute.manager [-] [instance: 564f08f5-17b2-477d-b1d5-7cd46436fad1] Took 1.26 seconds to deallocate network for instance. 
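The recurring "Waiting for the task ... progress is 0% ... completed successfully" records above (PowerOffVM_Task, DeleteDatastoreFile_Task) come from oslo_vmware's task polling: the driver submits a vCenter task and then polls its state until it reports success or error. A minimal, self-contained sketch of that polling pattern in plain Python, with a hypothetical get_task_state() callable standing in for the real oslo_vmware.api.VMwareAPISession.wait_for_task machinery:

import time

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes.

    get_task_state is a hypothetical callable returning (state, progress),
    e.g. ("running", 33) or ("success", 100); the real driver delegates
    this loop to oslo_vmware rather than polling by hand.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = get_task_state()
        # Mirrors the repeated "_poll_task ... progress is N%" DEBUG records.
        print("Task progress is %d%%." % progress)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("vCenter task failed")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)

if __name__ == "__main__":
    # Fake task that finishes on the third poll, like the PowerOffVM_Task above.
    states = iter([("running", 0), ("running", 33), ("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0.01)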
[ 1059.150201] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.150563] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.150563] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.150753] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1059.151649] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545de6dd-83fe-41d9-9718-33bdd858e9b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.160604] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6018ec-1775-40b0-ac72-72b7875db664 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.174536] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305de0c4-2f9f-4a71-9145-33d52378bf24 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.181227] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7d67a8-c2cf-45c0-96c0-d72e55be51ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.184909] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.185090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.185307] env[61898]: DEBUG nova.network.neutron [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Building network info cache for instance {{(pid=61898) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.214951] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180978MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1059.215138] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.215333] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.364337] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.571263] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1059.572045] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6697e2-39a0-4c45-bc7b-a55c0c35d717 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.578489] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1059.578661] env[61898]: ERROR oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk due to incomplete transfer. 
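The ERROR record above ("Aborting lease ... due to incomplete transfer") is emitted while closing the NFC export lease after the VMDK upload to Glance: when the read handle is closed before the full disk was consumed, the lease is aborted rather than completed so vCenter does not keep a half-finished export open. A rough sketch of that decision only, using a hypothetical Lease object and release_lease() helper; this is an illustration of the behaviour visible in the log, not the actual oslo_vmware.rw_handles implementation:

class Lease:
    """Hypothetical stand-in for a vCenter HttpNfcLease handle."""

    def __init__(self, url):
        self.url = url

    def complete(self):
        print("HttpNfcLeaseComplete for %s" % self.url)

    def abort(self):
        print("HttpNfcLeaseAbort for %s" % self.url)


def release_lease(lease, bytes_transferred, expected_bytes):
    # Mirrors the log: a lease in state 'ready' is completed only when the
    # whole disk was read; otherwise it is aborted as an incomplete transfer.
    if bytes_transferred >= expected_bytes:
        lease.complete()
    else:
        print("Aborting lease for %s due to incomplete transfer." % lease.url)
        lease.abort()


if __name__ == "__main__":
    release_lease(Lease("https://esx/nfc/example/disk-0.vmdk"),
                  bytes_transferred=512, expected_bytes=1024)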
[ 1059.578872] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a55423b9-4095-44d1-ae69-45b2102ccd1e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.585303] env[61898]: DEBUG oslo_vmware.rw_handles [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5216a062-1074-afa2-81b0-57269ae6b1df/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1059.585496] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Uploaded image e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1059.587813] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1059.588052] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-34e33d42-0008-4a40-9c44-9b24c9acaf45 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.593563] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1059.593563] env[61898]: value = "task-1241257" [ 1059.593563] env[61898]: _type = "Task" [ 1059.593563] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.601615] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241257, 'name': Destroy_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.900671] env[61898]: DEBUG nova.network.neutron [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.103701] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241257, 'name': Destroy_Task} progress is 33%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.222867] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Applying migration context for instance d0184b78-1525-44a4-a515-3eeb34a59cde as it has an incoming, in-progress migration df03973c-d1e5-4406-8a29-8b137fc9727b. Migration status is post-migrating {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1060.223628] env[61898]: INFO nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating resource usage from migration df03973c-d1e5-4406-8a29-8b137fc9727b [ 1060.241906] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 523a29df-e21d-4e38-9437-ebcdd7012f57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.242070] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance f8b4a587-9ca8-4710-8cf6-3f6ea336185c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.242217] env[61898]: WARNING nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 564f08f5-17b2-477d-b1d5-7cd46436fad1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1060.242342] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Migration df03973c-d1e5-4406-8a29-8b137fc9727b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1060.242457] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance d0184b78-1525-44a4-a515-3eeb34a59cde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.242630] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1060.242762] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1344MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1060.302034] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e2dc22-6e69-4c75-aa24-583eff472d50 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.309579] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9110ca3b-037a-45d3-a841-d29db6b3e724 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.338990] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7464152-6079-4752-bc96-53ef52c6d1d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.345606] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba37928-93c7-4ee9-a6ba-a2083bd191f6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.999868] env[61898]: DEBUG oslo_concurrency.lockutils [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.016585] env[61898]: DEBUG nova.compute.provider_tree [None 
req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.019909] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241257, 'name': Destroy_Task, 'duration_secs': 0.639117} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.020346] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Destroyed the VM [ 1061.020581] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1061.020824] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4d52ea0f-78b4-42d4-9fbd-f1fa95f0df60 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.026712] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1061.026712] env[61898]: value = "task-1241258" [ 1061.026712] env[61898]: _type = "Task" [ 1061.026712] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.034505] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241258, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.512450] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb0a385-9133-41ad-bb16-a9ae050b8b02 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.519063] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12eafc6d-3b17-4fad-a845-443cafb842ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.522068] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1061.534236] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241258, 'name': RemoveSnapshot_Task, 'duration_secs': 0.298401} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.537475] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1061.537744] env[61898]: DEBUG nova.compute.manager [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1061.540713] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bcb8f6-a761-4a14-ac0c-8f12d92fc412 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.026787] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1062.027030] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.812s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.027377] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 
tempest-InstanceActionsTestJSON-347733048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.663s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.027602] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.029641] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.029784] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Cleaning up deleted instances {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11557}} [ 1062.053028] env[61898]: INFO nova.scheduler.client.report [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Deleted allocations for instance 564f08f5-17b2-477d-b1d5-7cd46436fad1 [ 1062.054162] env[61898]: INFO nova.compute.manager [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Shelve offloading [ 1062.545032] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] There are 54 instances to clean {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11566}} [ 1062.545393] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: d4189084-f73f-4857-a418-6eb7f5b90d83] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1062.561475] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ff55434d-a9ba-4d47-b776-3751e633c69c tempest-InstanceActionsTestJSON-347733048 tempest-InstanceActionsTestJSON-347733048-project-member] Lock "564f08f5-17b2-477d-b1d5-7cd46436fad1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.591s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.562864] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1062.563139] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50ece109-4080-4a67-8956-1c600926c8f5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.570624] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1062.570624] env[61898]: value = "task-1241259" [ 1062.570624] env[61898]: _type = "Task" [ 1062.570624] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.582257] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1062.582521] env[61898]: DEBUG nova.compute.manager [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1062.583479] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a158ade3-68d2-4269-a8d1-696a92048d08 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.590271] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.590498] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.590711] env[61898]: DEBUG nova.network.neutron [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1062.621811] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf624be9-c8a7-4267-9d0f-a267c1bcab8e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.642440] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb87df4-2cf6-4faf-a94a-d8d52f88673b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.648837] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 83 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.048292] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None 
None] [instance: 6fdd2128-9823-4a64-a49a-9f327d63994d] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1063.155467] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.155785] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-937a758f-563a-4589-82e6-5ac18460a47a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.163047] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1063.163047] env[61898]: value = "task-1241260" [ 1063.163047] env[61898]: _type = "Task" [ 1063.163047] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.170645] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241260, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.301204] env[61898]: DEBUG nova.network.neutron [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.551221] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 2fe9d97d-57e0-4b08-968b-4bb97a610fbb] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11570}} [ 1063.673601] env[61898]: DEBUG oslo_vmware.api [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241260, 'name': PowerOnVM_Task, 'duration_secs': 0.379544} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.673923] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1063.674388] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-72d4b840-708f-4d0c-9f89-09ae16cdd534 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance 'd0184b78-1525-44a4-a515-3eeb34a59cde' progress to 100 {{(pid=61898) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.804124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.054612] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 587c9997-3b6d-4654-9cf3-f181833c0728] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1064.294339] env[61898]: DEBUG nova.compute.manager [req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-vif-unplugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1064.295655] env[61898]: DEBUG oslo_concurrency.lockutils [req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1064.295886] env[61898]: DEBUG oslo_concurrency.lockutils [req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.296076] env[61898]: DEBUG oslo_concurrency.lockutils [req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.296259] env[61898]: DEBUG nova.compute.manager 
[req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] No waiting events found dispatching network-vif-unplugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1064.296440] env[61898]: WARNING nova.compute.manager [req-31bc2bf2-e3a8-4827-bee7-c0e6a874bb89 req-58809bf9-34e7-4fa9-be7e-aa7f1afc198a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received unexpected event network-vif-unplugged-fc927434-188b-4c42-9200-bcb870385a25 for instance with vm_state shelved and task_state shelving_offloading. [ 1064.393788] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.394558] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1e4fbd-a3bc-4ece-9199-0a5f730bc2b4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.402395] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.402689] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1734a733-cc41-4280-8955-8efd3900308d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.484923] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.485189] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.485383] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.485656] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4ab057f-2583-466a-8e3e-498d566bc9c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.493540] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for 
the task: (returnval){ [ 1064.493540] env[61898]: value = "task-1241262" [ 1064.493540] env[61898]: _type = "Task" [ 1064.493540] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.503133] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241262, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.558459] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 7eb0d534-90c8-439d-a894-3f03151ac74b] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1065.003925] env[61898]: DEBUG oslo_vmware.api [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241262, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130691} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.004261] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.004447] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.004615] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.061485] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 008bab4f-240b-4cb7-86eb-9b1f01ea6e4c] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1065.139496] env[61898]: INFO nova.scheduler.client.report [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted allocations for instance 523a29df-e21d-4e38-9437-ebcdd7012f57 [ 1065.565690] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 06c894a2-9236-4534-922f-4255c6cf0531] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1065.644252] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.644252] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.644420] env[61898]: DEBUG nova.objects.instance [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'resources' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.069871] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: c26c4add-728c-45ea-8465-7c4273b7d97b] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1066.114705] env[61898]: DEBUG nova.network.neutron [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Port 630c2b2d-b17e-470f-ad5f-506c4734d40c binding to destination host cpu-1 is already ACTIVE {{(pid=61898) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1066.114986] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.115156] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.115347] env[61898]: DEBUG nova.network.neutron [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.147531] env[61898]: DEBUG nova.objects.instance [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'numa_topology' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.325633] env[61898]: DEBUG nova.compute.manager [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-changed-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1066.325906] 
env[61898]: DEBUG nova.compute.manager [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing instance network info cache due to event network-changed-fc927434-188b-4c42-9200-bcb870385a25. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1066.333136] env[61898]: DEBUG oslo_concurrency.lockutils [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1066.333136] env[61898]: DEBUG oslo_concurrency.lockutils [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.333136] env[61898]: DEBUG nova.network.neutron [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing network info cache for port fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1066.573613] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 622326f9-b3c5-452e-b7f6-dfe6de1e7d4b] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1066.650994] env[61898]: DEBUG nova.objects.base [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Object Instance<523a29df-e21d-4e38-9437-ebcdd7012f57> lazy-loaded attributes: resources,numa_topology {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1066.714441] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6a895e-536c-4d97-82e3-47171c3e1b25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.722870] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8956d88-72cb-4c00-819e-ea67843c0346 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.753924] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4fbe562-ac63-442f-ae63-57563e551204 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.767289] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b0db72-83c3-4853-a557-7626e3854905 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.786046] env[61898]: DEBUG nova.compute.provider_tree [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.911598] 
env[61898]: DEBUG nova.network.neutron [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.080119] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: a2ceed2d-be5e-4baa-b2a7-1116812e775d] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1067.186863] env[61898]: DEBUG nova.network.neutron [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updated VIF entry in instance network info cache for port fc927434-188b-4c42-9200-bcb870385a25. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1067.187275] env[61898]: DEBUG nova.network.neutron [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapfc927434-18", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.289704] env[61898]: DEBUG nova.scheduler.client.report [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1067.418792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.571627] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.585405] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 4522f4ef-c8f6-4fe1-acd5-796f87f22839] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1067.690555] env[61898]: DEBUG oslo_concurrency.lockutils [req-25624485-7a33-4e99-a778-1e05fa309f35 req-4560bcf3-9063-4560-9fa5-74a8c858a08c service nova] Releasing lock 
"refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1067.794880] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.150s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.923109] env[61898]: DEBUG nova.compute.manager [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=61898) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:900}} [ 1068.087011] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 5829d09f-d7bb-4e2c-8b2d-4cd8e20607c1] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1068.305443] env[61898]: DEBUG oslo_concurrency.lockutils [None req-18b60aef-c2a5-4369-a92a-9130d0e55f8b tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.365s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.306468] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.735s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.306468] env[61898]: INFO nova.compute.manager [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Unshelving [ 1068.385274] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.385500] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.590254] env[61898]: DEBUG nova.compute.manager [None 
req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 47208ebd-8407-4d00-8378-adb0a4a21c2a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1068.888360] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1069.038694] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.038928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.094022] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: cd1335b7-78b7-4cea-add7-dd69736067b0] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1069.335808] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.407241] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1069.542718] env[61898]: DEBUG nova.objects.instance [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'migration_context' on Instance uuid d0184b78-1525-44a4-a515-3eeb34a59cde {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.597430] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: bfafe501-3aa7-4e45-b6e6-24bd0e1b5b52] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1070.101934] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: b6f6c28b-fceb-45c9-992e-e67c4fc2a3f4] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1070.112992] env[61898]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36133066-154f-46bd-9dd1-f7ad80ecee8c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.120563] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8148016-94cf-41a8-b754-16ddf2c62e5f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.150536] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9116e5a-56c9-41f5-914d-41a72491beb7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.158026] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec3c685-c5ee-4976-986f-b1a92e233348 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.171276] env[61898]: DEBUG nova.compute.provider_tree [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.605509] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: cf428138-4d0d-43bf-a654-06a62a82c9a1] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1070.674530] env[61898]: DEBUG nova.scheduler.client.report [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1071.114246] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 11ca5129-0dc3-44b3-8f7b-215c93dac764] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1071.617684] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: cdd5f647-2c43-4389-820d-2d39d7d20889] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1071.685832] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.647s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.691373] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.356s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.691601] env[61898]: DEBUG nova.objects.instance [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'pci_requests' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.120814] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 57aa8b33-a6c8-4b8e-b79a-f7b8ed2811fb] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1072.199233] env[61898]: DEBUG nova.objects.instance [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'numa_topology' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1072.624190] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 9afa94d2-16a1-484f-96b4-8bbd93829ffe] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1072.702918] env[61898]: INFO nova.compute.claims [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.127405] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 5323b250-fad8-4d71-81ed-c5e5eeb8aeab] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1073.231107] env[61898]: INFO nova.compute.manager [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Swapping old allocation on dict_keys(['79886f75-94e9-4bf0-9cbd-87f3715d3144']) held by migration df03973c-d1e5-4406-8a29-8b137fc9727b for instance [ 1073.254173] env[61898]: DEBUG nova.scheduler.client.report [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Overwriting current allocation {'allocations': {'79886f75-94e9-4bf0-9cbd-87f3715d3144': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 138}}, 'project_id': '975e564bd7f442629018b97007460e00', 'user_id': '2ce8ddf4b7fe4e0583f09e7f88ab5e70', 'consumer_generation': 1} on consumer d0184b78-1525-44a4-a515-3eeb34a59cde {{(pid=61898) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1073.331352] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1073.331542] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.331724] env[61898]: DEBUG nova.network.neutron [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.631054] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 80931b22-a69b-41cd-b707-13bf11111b88] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1073.765199] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c098fcc3-7daa-4034-8cac-0646f34ba272 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.772470] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8160abc7-a217-4cc3-8716-29eb8cf41c7b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.801042] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aec5809-83e8-40f5-87ff-6aa7635a515c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.807614] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ce9c39-45ba-475b-a157-faaed50df193 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.819897] env[61898]: DEBUG nova.compute.provider_tree [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.026587] env[61898]: DEBUG nova.network.neutron [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [{"id": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "address": "fa:16:3e:f7:46:22", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": 
"floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630c2b2d-b1", "ovs_interfaceid": "630c2b2d-b17e-470f-ad5f-506c4734d40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.133882] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 01685478-9d68-4edd-8dff-7d63fcd8bcd3] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1074.322584] env[61898]: DEBUG nova.scheduler.client.report [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1074.529433] env[61898]: DEBUG oslo_concurrency.lockutils [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-d0184b78-1525-44a4-a515-3eeb34a59cde" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1074.530942] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8607e2df-228e-4120-ab53-105451c724b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.538054] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7654fa-7de8-4d42-a8df-edb90ab4d0ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.637578] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 49f22529-f68f-4b1f-8cb3-7ca2a6cbf1f5] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1074.827087] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.136s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
[ 1074.829729] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.423s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.831245] env[61898]: INFO nova.compute.claims [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.859921] env[61898]: INFO nova.network.neutron [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating port fc927434-188b-4c42-9200-bcb870385a25 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1075.141244] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: e5c38d18-18e4-47dc-8445-71d3dc0c325a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1075.618079] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.618441] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93b15d0d-a1a0-462f-80e0-2b394a7e7f81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.626718] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1075.626718] env[61898]: value = "task-1241263" [ 1075.626718] env[61898]: _type = "Task" [ 1075.626718] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.634334] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.643836] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 9b7b9962-fda1-46af-9ecc-ea5b352d5193] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1075.900089] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6316a650-793e-481f-ab94-e3b785069657 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.907252] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f641215-fcb0-4e06-b4a1-37c910d6ec2f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.936055] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d8258e-57cd-4615-a0ce-35043cda4b48 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.943477] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afb3f6e-4856-46fd-a921-5b93d9be3dcb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.957240] env[61898]: DEBUG nova.compute.provider_tree [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.137115] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241263, 'name': PowerOffVM_Task, 'duration_secs': 0.210288} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.137417] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.138102] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.138349] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.138523] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.138713] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.138871] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.139118] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.139342] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.139507] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.139677] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.139870] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.140144] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.144897] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f84785a8-c129-4fd3-b89f-8780cfbd87b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.155132] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 43b823d5-94f9-4b2a-b5d9-31ef6ecb5f4d] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1076.162813] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1076.162813] env[61898]: value = "task-1241264" [ 1076.162813] env[61898]: _type = "Task" [ 1076.162813] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.172440] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241264, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.244709] env[61898]: DEBUG nova.compute.manager [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1076.244951] env[61898]: DEBUG oslo_concurrency.lockutils [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.245270] env[61898]: DEBUG oslo_concurrency.lockutils [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.245519] env[61898]: DEBUG oslo_concurrency.lockutils [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.245678] env[61898]: DEBUG nova.compute.manager [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] No waiting events found dispatching network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1076.245837] env[61898]: WARNING nova.compute.manager [req-68b32f7b-1adb-4d2b-af7c-bf1130cad0d2 req-ca747fb5-6282-43ab-87d4-46f65bea66e8 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received unexpected event network-vif-plugged-fc927434-188b-4c42-9200-bcb870385a25 for instance with vm_state shelved_offloaded and task_state spawning. 
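
The PowerOffVM_Task and ReconfigVM_Task sequences above ("Invoking ... Waiting for the task ... progress is N% ... completed successfully") follow oslo.vmware's invoke-then-poll pattern. A minimal sketch under stated assumptions: session stands for the already-established oslo_vmware.api.VMwareAPISession created at service start-up earlier in this log, and vm_ref is a hypothetical VirtualMachine managed-object reference; this is not Nova's driver code.

    def power_off_vm(session, vm_ref):
        # invoke_api issues the SOAP call against the vSphere SDK endpoint and
        # returns a Task managed object (the "Invoking VirtualMachine.PowerOffVM_Task"
        # DEBUG lines above).
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task state (the _poll_task "progress is N%" lines)
        # and returns the task info once it completes successfully, raising an
        # oslo_vmware exception if the task reports an error.
        return session.wait_for_task(task_ref)
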
[ 1076.329465] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.329656] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.329941] env[61898]: DEBUG nova.network.neutron [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.459787] env[61898]: DEBUG nova.scheduler.client.report [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1076.659045] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 5fc14058-7953-4e6a-a9ef-7933d61e9f3e] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1076.672089] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241264, 'name': ReconfigVM_Task, 'duration_secs': 0.151429} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.672873] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f8ec97-f195-49a2-8060-e967f100253d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.694185] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.694432] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.694592] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.694776] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.694928] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.695094] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.695304] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.695468] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.695633] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.695796] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.695969] env[61898]: DEBUG nova.virt.hardware [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.697431] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc211572-e242-4405-9e80-9f4839e72c3c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.703108] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1076.703108] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b57248-dc74-aeec-206e-6858116c8d1f" [ 1076.703108] env[61898]: _type = "Task" [ 1076.703108] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.710483] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b57248-dc74-aeec-206e-6858116c8d1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.965261] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.135s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.966049] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1077.039840] env[61898]: DEBUG nova.network.neutron [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.161557] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: eda63357-6749-4652-914a-dc5b69163eb6] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.214146] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b57248-dc74-aeec-206e-6858116c8d1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010332} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.219625] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1077.220153] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f370b86-dcc8-4813-b4cd-aaaf04cbe11f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.237609] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1077.237609] env[61898]: value = "task-1241265" [ 1077.237609] env[61898]: _type = "Task" [ 1077.237609] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.471773] env[61898]: DEBUG nova.compute.utils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1077.473268] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1077.473442] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.509929] env[61898]: DEBUG nova.policy [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e49628d4c164d2c960b5633f2298489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22e580379ada4df68e75719930fe6416', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1077.542362] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.570135] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c06157830e6e9c057f0a0b9c64237c85',container_format='bare',created_at=2024-10-10T12:03:35Z,direct_url=,disk_format='vmdk',id=e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1520842663-shelved',owner='a6198f817d1b471483500fe05c9bef3f',properties=ImageMetaProps,protected=,size=31662592,status='active',tags=,updated_at=2024-10-10T12:03:49Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1077.570406] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1077.570572] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.570751] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1077.570897] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.571393] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1077.571393] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1077.571506] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1077.571580] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1077.571762] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1077.571894] env[61898]: DEBUG nova.virt.hardware [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1077.572773] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d28ce-c52f-4f0d-86e2-ba8d0136e7a5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1077.580728] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0d70e2-4b91-4cf9-9e60-8895e95cb4b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.594669] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:b0:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc927434-188b-4c42-9200-bcb870385a25', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1077.602308] env[61898]: DEBUG oslo.service.loopingcall [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1077.602308] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1077.602484] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37aa3adc-4b9d-436c-b385-5d250fafbc11 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.621017] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1077.621017] env[61898]: value = "task-1241266" [ 1077.621017] env[61898]: _type = "Task" [ 1077.621017] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.628093] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241266, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.664802] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 51f33e74-0bb3-488c-9a6d-d1ccc53f469b] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1077.748114] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241265, 'name': ReconfigVM_Task, 'duration_secs': 0.228892} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.748114] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to detach disk 2000 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1077.749643] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50daa45d-16b5-469f-b1b9-d95cbef3dc22 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.775087] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1077.776103] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Successfully created port: adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1077.777937] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-450f4856-43dc-41f7-9182-0053b63e6a9e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.796061] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1077.796061] env[61898]: value = "task-1241267" [ 1077.796061] env[61898]: _type = "Task" [ 1077.796061] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.803722] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.977037] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1078.130893] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241266, 'name': CreateVM_Task, 'duration_secs': 0.304801} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.131097] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.131830] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.132214] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.132609] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1078.132889] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dad8722-c4ac-435a-b1c5-56e95faf3a85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.137519] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1078.137519] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]522168bb-603f-e234-e841-d20f5b1a38ef" [ 1078.137519] env[61898]: _type = "Task" [ 1078.137519] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.145246] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]522168bb-603f-e234-e841-d20f5b1a38ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.167812] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: e19e820c-154d-4e91-8631-dab9439d11a2] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.271134] env[61898]: DEBUG nova.compute.manager [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-changed-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1078.271336] env[61898]: DEBUG nova.compute.manager [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing instance network info cache due to event network-changed-fc927434-188b-4c42-9200-bcb870385a25. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1078.271547] env[61898]: DEBUG oslo_concurrency.lockutils [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.271693] env[61898]: DEBUG oslo_concurrency.lockutils [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.271852] env[61898]: DEBUG nova.network.neutron [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Refreshing network info cache for port fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1078.305642] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241267, 'name': ReconfigVM_Task, 'duration_secs': 0.338809} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.305919] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to attach disk [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde/d0184b78-1525-44a4-a515-3eeb34a59cde.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1078.306979] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3118aa70-693b-4b4d-aff1-988cc0f0473b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.327486] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a1def9-c11f-47ac-9129-f846c7d783aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.348302] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763e1298-465f-4b1d-8f91-df4912570811 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.369172] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651852c3-32fc-4443-83be-836395f08010 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.376155] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1078.376419] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3abdcc9f-5273-464d-af4a-840d1de6e73b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.381756] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1078.381756] env[61898]: value = "task-1241268" [ 1078.381756] env[61898]: _type = "Task" [ 1078.381756] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.393015] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241268, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.647983] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.648273] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Processing image e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.648512] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.648666] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.648844] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1078.649201] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd7e7c77-b995-4321-a0d7-870d6660063f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.657246] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1078.657494] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1078.658209] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57ec2a22-37be-48e5-95c4-b2ed6de7f222 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.662939] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1078.662939] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b75c20-c30d-3ebc-c32c-851b5bc9de79" [ 1078.662939] env[61898]: _type = "Task" [ 1078.662939] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.670713] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 86367a82-239b-4f6e-b306-d9661eadf95e] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1078.672482] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b75c20-c30d-3ebc-c32c-851b5bc9de79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.892385] env[61898]: DEBUG oslo_vmware.api [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241268, 'name': PowerOnVM_Task, 'duration_secs': 0.392399} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.892690] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1078.986484] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1079.013735] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1079.014040] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1079.014249] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1079.014447] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1079.014597] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1079.014748] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1079.014960] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1079.015141] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1079.015323] env[61898]: DEBUG 
nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1079.015526] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1079.015752] env[61898]: DEBUG nova.virt.hardware [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1079.016709] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6d9d48-33bd-4671-82c6-ef0f0f61745a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.026975] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3148da22-c2e8-496b-8e1b-11a59d4ce3eb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.052439] env[61898]: DEBUG nova.network.neutron [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updated VIF entry in instance network info cache for port fc927434-188b-4c42-9200-bcb870385a25. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1079.052851] env[61898]: DEBUG nova.network.neutron [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.173110] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1079.173382] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Fetch image to [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71/OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1079.173613] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Downloading stream optimized image e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f to [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71/OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71.vmdk on the data store datastore2 as vApp {{(pid=61898) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1079.173733] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Downloading image file data e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f to the ESX as VM named 'OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71' {{(pid=61898) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1079.175772] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 626caecc-6389-4064-aafd-9968cee262ee] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.242064] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1079.242064] env[61898]: value = "resgroup-9" [ 1079.242064] env[61898]: _type = "ResourcePool" [ 1079.242064] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1079.242372] env[61898]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d3fcedb3-66b0-4063-91ac-429dd832118f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.258624] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Successfully updated port: adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1079.265546] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease: (returnval){ [ 1079.265546] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1079.265546] env[61898]: _type = "HttpNfcLease" [ 1079.265546] env[61898]: } obtained for vApp import into resource pool (val){ [ 1079.265546] env[61898]: value = "resgroup-9" [ 1079.265546] env[61898]: _type = "ResourcePool" [ 1079.265546] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1079.265930] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the lease: (returnval){ [ 1079.265930] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1079.265930] env[61898]: _type = "HttpNfcLease" [ 1079.265930] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1079.272435] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1079.272435] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1079.272435] env[61898]: _type = "HttpNfcLease" [ 1079.272435] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1079.555866] env[61898]: DEBUG oslo_concurrency.lockutils [req-a99f8611-536c-438a-a11a-a91f6d45cda0 req-6573a136-0ecf-4ead-9b77-f33fc55a8652 service nova] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.679606] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 070bc0cc-ff77-48b8-bd08-f17fe69e25af] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1079.761579] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.761767] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquired lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.762018] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.774581] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1079.774581] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1079.774581] env[61898]: _type = "HttpNfcLease" [ 1079.774581] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1079.792600] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.792863] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.937703] env[61898]: INFO nova.compute.manager [None req-0586b0c0-8791-402b-9231-acce1a454914 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance to original state: 'active' [ 1080.182846] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: b709df92-bf56-40ed-ba48-a8fa19be8b68] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1080.275247] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1080.275247] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1080.275247] env[61898]: _type = "HttpNfcLease" [ 1080.275247] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1080.295516] env[61898]: DEBUG nova.compute.utils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1080.297320] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1080.321847] env[61898]: DEBUG nova.compute.manager [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Received event network-vif-plugged-adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1080.322091] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Acquiring lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.322735] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.322864] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.323057] env[61898]: DEBUG nova.compute.manager [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] No waiting events found dispatching network-vif-plugged-adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1080.323231] env[61898]: WARNING nova.compute.manager [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Received unexpected event network-vif-plugged-adc0e331-b036-4452-8908-97777cb02434 for instance with vm_state building and task_state spawning. [ 1080.323405] env[61898]: DEBUG nova.compute.manager [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Received event network-changed-adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1080.323541] env[61898]: DEBUG nova.compute.manager [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Refreshing instance network info cache due to event network-changed-adc0e331-b036-4452-8908-97777cb02434. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1080.323711] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Acquiring lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.430469] env[61898]: DEBUG nova.network.neutron [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Updating instance_info_cache with network_info: [{"id": "adc0e331-b036-4452-8908-97777cb02434", "address": "fa:16:3e:0b:7e:81", "network": {"id": "910d7057-086d-4195-b6d2-aa6badd762e0", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2109259466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22e580379ada4df68e75719930fe6416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc0e331-b0", "ovs_interfaceid": "adc0e331-b036-4452-8908-97777cb02434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.686247] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: d6c96dce-13ae-411a-b52a-fee484718a8a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1080.777405] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1080.777405] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1080.777405] env[61898]: _type = "HttpNfcLease" [ 1080.777405] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1080.800406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1080.932831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Releasing lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.933223] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Instance network_info: |[{"id": "adc0e331-b036-4452-8908-97777cb02434", "address": "fa:16:3e:0b:7e:81", "network": {"id": "910d7057-086d-4195-b6d2-aa6badd762e0", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2109259466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22e580379ada4df68e75719930fe6416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc0e331-b0", "ovs_interfaceid": "adc0e331-b036-4452-8908-97777cb02434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1080.933614] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Acquired lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.933723] env[61898]: DEBUG nova.network.neutron [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Refreshing network info cache for port adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1080.939964] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:7e:81', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adc0e331-b036-4452-8908-97777cb02434', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.946695] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Creating folder: Project (22e580379ada4df68e75719930fe6416). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.947339] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-73893002-7027-44de-904a-278eb179beae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.960477] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Created folder: Project (22e580379ada4df68e75719930fe6416) in parent group-v267550. [ 1080.960678] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Creating folder: Instances. Parent ref: group-v267731. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.960918] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35809222-6a53-4da6-a0cc-4881aee5c6ce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.969632] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Created folder: Instances in parent group-v267731. [ 1080.969852] env[61898]: DEBUG oslo.service.loopingcall [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.970082] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.970290] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a4d2a70-fe7d-4754-b7ba-15429b6d10db {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.989203] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.989203] env[61898]: value = "task-1241272" [ 1080.989203] env[61898]: _type = "Task" [ 1080.989203] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.996513] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241272, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.080642] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.080931] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.081164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.081364] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.081544] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.084056] env[61898]: INFO nova.compute.manager [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Terminating instance [ 1081.189536] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 7c6aad92-6e91-48fc-89ae-5ee4c89f449c] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.276315] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1081.276315] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1081.276315] env[61898]: _type = "HttpNfcLease" [ 1081.276315] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1081.498686] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241272, 'name': CreateVM_Task, 'duration_secs': 0.34551} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.498913] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1081.499580] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.499775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.500124] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1081.500654] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eea1cb0-6868-4dd2-a560-61a5bffa6509 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.505062] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1081.505062] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524cf8b0-3d7d-6fdb-74e5-09fcccaf070d" [ 1081.505062] env[61898]: _type = "Task" [ 1081.505062] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.512795] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524cf8b0-3d7d-6fdb-74e5-09fcccaf070d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.587738] env[61898]: DEBUG nova.compute.manager [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1081.588031] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.588327] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d13f4f91-2c2e-418f-a5b0-ad2dea754720 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.595438] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1081.595438] env[61898]: value = "task-1241273" [ 1081.595438] env[61898]: _type = "Task" [ 1081.595438] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.602963] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.693905] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 52a584e1-61ae-447d-90e0-e15d32a96314] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1081.777795] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1081.777795] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1081.777795] env[61898]: _type = "HttpNfcLease" [ 1081.777795] env[61898]: } is initializing. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1081.842106] env[61898]: DEBUG nova.network.neutron [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Updated VIF entry in instance network info cache for port adc0e331-b036-4452-8908-97777cb02434. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.842498] env[61898]: DEBUG nova.network.neutron [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Updating instance_info_cache with network_info: [{"id": "adc0e331-b036-4452-8908-97777cb02434", "address": "fa:16:3e:0b:7e:81", "network": {"id": "910d7057-086d-4195-b6d2-aa6badd762e0", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2109259466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22e580379ada4df68e75719930fe6416", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadc0e331-b0", "ovs_interfaceid": "adc0e331-b036-4452-8908-97777cb02434", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.859505] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.859776] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.860104] env[61898]: INFO nova.compute.manager [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Attaching volume bdeba398-0cb6-409b-9caf-e9b957918e7d to /dev/sdb [ 1081.903337] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1504c432-6ce8-4d66-9d4d-743ba5d82811 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.910895] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa96f7e-cecf-4d4c-9f42-964f62b2e376 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.925364] env[61898]: DEBUG nova.virt.block_device [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 
tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating existing volume attachment record: aad552a0-6965-4816-aa73-2bea2dcb13a7 {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1082.014896] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524cf8b0-3d7d-6fdb-74e5-09fcccaf070d, 'name': SearchDatastore_Task, 'duration_secs': 0.011243} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.015255] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.015497] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1082.015728] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.015875] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.016062] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.016318] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f70fab3-1670-4d71-af7a-5d95dfa03130 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.024101] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.024314] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 
tempest-ServerAddressesTestJSON-368889151-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1082.024994] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8618c4b5-9b88-47bb-868f-1bc1130165ae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.029555] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1082.029555] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5244e3bd-8094-f89d-f0e9-6530eac25749" [ 1082.029555] env[61898]: _type = "Task" [ 1082.029555] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.036816] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5244e3bd-8094-f89d-f0e9-6530eac25749, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.104284] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241273, 'name': PowerOffVM_Task, 'duration_secs': 0.245338} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.104558] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.104758] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1082.104952] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267723', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'name': 'volume-06dc7a5e-d2cb-441b-8708-03e168a9b278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd0184b78-1525-44a4-a515-3eeb34a59cde', 'attached_at': '2024-10-10T12:04:04.000000', 'detached_at': '', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'serial': '06dc7a5e-d2cb-441b-8708-03e168a9b278'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1082.105711] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd49d78-0aee-4e5b-8adc-d603703f16ec {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.126327] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e0fade-0bf7-429c-a82b-3209c445817f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.132469] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf2c9d-6d08-4c58-8978-0f39baa0b5a1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.152158] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d41a64a-d7ac-41cc-93d0-b2b7653e3c7c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.166243] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] The volume has not been displaced from its original location: [datastore2] volume-06dc7a5e-d2cb-441b-8708-03e168a9b278/volume-06dc7a5e-d2cb-441b-8708-03e168a9b278.vmdk. No consolidation needed. 
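The _detach_volume_vmdk record above dumps the Cinder connection_info dict the driver works from. Pulling the relevant fields out of such a dict is plain dictionary access; a small sketch over the structure shown in the log (field names copied from the logged dict, the helper itself is illustrative):

    # Extract the fields a vmdk detach path cares about from a Cinder
    # connection_info dict shaped like the one logged above.
    def describe_vmdk_connection(connection_info):
        data = connection_info['data']
        return {
            'backing_ref': data['volume'],           # e.g. 'vm-267723'
            'volume_id': data['volume_id'],
            'vmdk_name': data['name'],               # 'volume-<uuid>'
            'access_mode': data.get('access_mode', 'rw'),
            'encrypted': data.get('encrypted', False),
        }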
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1082.171519] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1082.171845] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21a334a7-16a3-4b93-a01a-0dfd7eef4d61 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.189605] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1082.189605] env[61898]: value = "task-1241277" [ 1082.189605] env[61898]: _type = "Task" [ 1082.189605] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.199035] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: e851d73d-58f0-486a-a95c-70d07e5faad2] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1082.200768] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241277, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.276455] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1082.276455] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1082.276455] env[61898]: _type = "HttpNfcLease" [ 1082.276455] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1082.276707] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1082.276707] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7fac0-1107-c62a-00e2-6d53926fa5b5" [ 1082.276707] env[61898]: _type = "HttpNfcLease" [ 1082.276707] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1082.277473] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb92583-021b-42c6-910f-0ac72f0bb00d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.284417] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk from lease info. 
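"Reconfiguring VM instance ... to detach disk 2001" followed by VirtualMachine.ReconfigVM_Task is the usual shape of a disk detach: build a VirtualMachineConfigSpec whose deviceChange removes the device, then reconfigure and wait. A rough sketch, assuming `session.vim.client.factory` exposes the suds type factory and `device` is the VirtualDisk object to drop; cleanup of the backing file, if wanted, is a separate step and is omitted:

    # Build and apply a reconfigure spec that removes one virtual device.
    def detach_device(session, vm_ref, device):
        factory = session.vim.client.factory
        change = factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'remove'       # detach only; keep the vmdk on disk
        change.device = device
        spec = factory.create('ns0:VirtualMachineConfigSpec')
        spec.deviceChange = [change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
        session.wait_for_task(task)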
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1082.284621] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating HTTP connection to write to file with size = 31662592 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1082.346505] env[61898]: DEBUG oslo_concurrency.lockutils [req-1b7be605-3551-44c2-a362-c6609e3869b0 req-f012e93d-d5b6-4148-ad9a-674a314edceb service nova] Releasing lock "refresh_cache-456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.351659] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-977c17c9-c1bb-459f-a0d3-87e71c838430 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.544562] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5244e3bd-8094-f89d-f0e9-6530eac25749, 'name': SearchDatastore_Task, 'duration_secs': 0.013886} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.545427] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cbc83c7-4abb-435c-b93f-167e821ef1f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.550944] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1082.550944] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e70ecc-ecc5-60b1-ca06-a3bd621751b9" [ 1082.550944] env[61898]: _type = "Task" [ 1082.550944] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.560138] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e70ecc-ecc5-60b1-ca06-a3bd621751b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.700319] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241277, 'name': ReconfigVM_Task, 'duration_secs': 0.203949} completed successfully. 
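The HttpNfcLease records trace an image import: the lease starts "initializing", becomes "ready", exposes a VMDK upload URL, must be kept alive with HttpNfcLeaseProgress while data is streamed, and is closed with HttpNfcLeaseComplete. A condensed sketch of the keep-alive/complete half only, assuming `lease` is a lease reference already in the ready state; the actual byte transfer (handled by oslo.vmware's rw_handles in the log) is omitted:

    # Keep an HttpNfcLease alive while an upload runs, then finish it.
    def report_and_complete(session, lease, bytes_written, total_bytes):
        percent = int(bytes_written * 100 / total_bytes)
        # Periodic progress updates stop vCenter from timing the lease out.
        session.invoke_api(session.vim, 'HttpNfcLeaseProgress', lease,
                           percent=percent)
        if bytes_written >= total_bytes:
            # Counterpart of the HttpNfcLeaseComplete invocation in the log.
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)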
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.701951] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1082.706634] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: b106ab9e-08d4-4d18-90e0-13a071c9efb1] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1082.709948] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-47b8ac05-fa0b-4696-9bb9-6560478a5b3d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.728812] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1082.728812] env[61898]: value = "task-1241278" [ 1082.728812] env[61898]: _type = "Task" [ 1082.728812] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.736872] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241278, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.060929] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e70ecc-ecc5-60b1-ca06-a3bd621751b9, 'name': SearchDatastore_Task, 'duration_secs': 0.012599} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.061215] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1083.061508] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef/456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.061777] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd621ad4-5f92-4e33-828c-5dc623fb5852 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.068694] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1083.068694] env[61898]: value = "task-1241279" [ 1083.068694] env[61898]: _type = "Task" [ 1083.068694] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.075972] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.221341] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: ca6b01f9-8c57-4466-8dca-cd4c2cd0e66e] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.240375] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241278, 'name': ReconfigVM_Task, 'duration_secs': 0.151721} completed successfully. 
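"Copying Virtual Disk [datastore2] devstack-image-cache_base/... to [datastore2] 456f3b8e-.../..." is a server-side copy performed through the VirtualDiskManager, so no image data crosses the management network. A minimal sketch with the same `session` assumption as above, plus a datacenter reference `dc_ref` and full datastore paths like the ones in the log:

    # Server-side vmdk copy via the VirtualDiskManager.
    def copy_vmdk(session, dc_ref, source_path, dest_path):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                                  sourceName=source_path, sourceDatacenter=dc_ref,
                                  destName=dest_path, destDatacenter=dc_ref)
        session.wait_for_task(task)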
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.241891] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267723', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'name': 'volume-06dc7a5e-d2cb-441b-8708-03e168a9b278', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'd0184b78-1525-44a4-a515-3eeb34a59cde', 'attached_at': '2024-10-10T12:04:04.000000', 'detached_at': '', 'volume_id': '06dc7a5e-d2cb-441b-8708-03e168a9b278', 'serial': '06dc7a5e-d2cb-441b-8708-03e168a9b278'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1083.242235] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1083.243074] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7918e6-5d95-4f79-9acb-d15843b354ab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.250755] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1083.252560] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-989047d2-3023-4160-8693-541acb4bc558 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.317270] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1083.317513] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1083.317709] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleting the datastore file [datastore1] d0184b78-1525-44a4-a515-3eeb34a59cde {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1083.318024] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cf157d8-1d14-4b3d-90e8-0af64ceb8f17 {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.324675] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1083.324675] env[61898]: value = "task-1241281" [ 1083.324675] env[61898]: _type = "Task" [ 1083.324675] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.334581] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241281, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.579497] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241279, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.724693] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 5b51a1a5-7d54-4063-b680-e8b8b39fc46a] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1083.789277] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1083.789525] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1083.790537] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cdb19e-0c0f-48ad-a6d8-2c626a330bcf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.797307] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1083.797478] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk. 
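"Deleting the datastore file [datastore1] d0184b78-..." above goes through the FileManager: once the VM is unregistered, its whole folder on the datastore is removed in one task. A sketch under the same `session`/`dc_ref` assumptions:

    # Remove a file or folder from a datastore, e.g. '[datastore1] <instance-uuid>'.
    def delete_datastore_path(session, dc_ref, ds_path):
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)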
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1083.797698] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-77aaf82a-a72a-44af-b9d6-18f321bd3b5d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.833980] env[61898]: DEBUG oslo_vmware.api [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36876} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.834225] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.834416] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.834600] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.834773] env[61898]: INFO nova.compute.manager [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Took 2.25 seconds to destroy the instance on the hypervisor. [ 1083.835029] env[61898]: DEBUG oslo.service.loopingcall [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1083.835236] env[61898]: DEBUG nova.compute.manager [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1083.835328] env[61898]: DEBUG nova.network.neutron [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1083.988111] env[61898]: DEBUG oslo_vmware.rw_handles [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c4b2ad-3504-f105-53ab-1a2f9b4a6b10/disk-0.vmdk. 
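The oslo.service.loopingcall record above ("Waiting for function ..._deallocate_network_with_retries to return") is the retry wrapper around Neutron deallocation. The exact looping-call class used there is not visible in the log; a generic fixed-interval sketch of the same idea, with `deallocate_network` standing in as a hypothetical callable:

    # Retry a flaky operation on a fixed interval until it succeeds.
    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate_network, interval=2):
        def _try_once():
            try:
                deallocate_network()                  # hypothetical callable
            except Exception:
                return                                # keep looping, retry next tick
            raise loopingcall.LoopingCallDone()       # success: stop the loop

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        timer.start(interval=interval).wait()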
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1083.988396] env[61898]: INFO nova.virt.vmwareapi.images [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Downloaded image file data e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f [ 1083.989302] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36506cf8-784d-4373-b9b1-5254538b7461 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.005953] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86a499a1-829f-4cd2-b585-051f16cde049 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.033290] env[61898]: INFO nova.virt.vmwareapi.images [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] The imported VM was unregistered [ 1084.035778] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1084.036040] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Creating directory with path [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.036319] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f97da2b7-cf16-43bd-b1ce-264aa475dd6a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.046866] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Created directory with path [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.047079] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71/OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71.vmdk to [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk. 
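"Caching image", the MakeDirectory call, and "Moving virtual disk from [datastore2] OSTACK_IMG_... to [datastore2] devstack-image-cache_base/<image-id>/..." together form the cache-promotion step: the staged import is moved under the per-image cache folder so later boots can copy from it. A sketch with the same `session`/`dc_ref` assumptions; treating an already-existing cache directory as success is an assumption of this sketch:

    # Create the per-image cache folder (idempotently) and move the staged
    # vmdk into it.
    from oslo_vmware import exceptions as vexc

    def promote_to_cache(session, dc_ref, staged_vmdk, cache_dir, cached_vmdk):
        fm = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, 'MakeDirectory', fm,
                               name=cache_dir, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass  # another worker created the cache directory first
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'MoveVirtualDisk_Task', vdm,
                                  sourceName=staged_vmdk, sourceDatacenter=dc_ref,
                                  destName=cached_vmdk, destDatacenter=dc_ref,
                                  force=False)
        session.wait_for_task(task)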
{{(pid=61898) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1084.047347] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e36ecfdb-9690-47bf-b3a8-792ddbe3787e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.053718] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1084.053718] env[61898]: value = "task-1241283" [ 1084.053718] env[61898]: _type = "Task" [ 1084.053718] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.061352] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.077315] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552567} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.077532] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef/456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.077654] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.077801] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21ec3c77-c21a-4edc-b6e7-bdd5cef0cc47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.085881] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1084.085881] env[61898]: value = "task-1241284" [ 1084.085881] env[61898]: _type = "Task" [ 1084.085881] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.094261] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241284, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.228193] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 466cbf07-e945-48d4-a103-5a3ea2b7adf6] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.309635] env[61898]: DEBUG nova.compute.manager [req-08009ed7-6cbb-44dc-b708-e2470d91c343 req-eef9d9d4-70fd-4f28-9e9c-d3e4bf262be1 service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Received event network-vif-deleted-630c2b2d-b17e-470f-ad5f-506c4734d40c {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1084.309922] env[61898]: INFO nova.compute.manager [req-08009ed7-6cbb-44dc-b708-e2470d91c343 req-eef9d9d4-70fd-4f28-9e9c-d3e4bf262be1 service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Neutron deleted interface 630c2b2d-b17e-470f-ad5f-506c4734d40c; detaching it from the instance and deleting it from the info cache [ 1084.310204] env[61898]: DEBUG nova.network.neutron [req-08009ed7-6cbb-44dc-b708-e2470d91c343 req-eef9d9d4-70fd-4f28-9e9c-d3e4bf262be1 service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.565950] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.597473] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0673} completed successfully. 
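"Extending root virtual disk to 1048576" (the figure looks like kilobytes, i.e. 1 GiB, although the unit is not printed) runs right after the image copy so the root disk matches the requested size. A sketch of the underlying call under the same assumptions as above; newCapacityKb is the vSphere parameter name and expects KB:

    # Grow a vmdk in place; capacity is passed to vSphere in KB.
    def extend_vmdk(session, dc_ref, vmdk_path, new_size_kb):
        vdm = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', vdm,
                                  name=vmdk_path, datacenter=dc_ref,
                                  newCapacityKb=new_size_kb, eagerZero=False)
        session.wait_for_task(task)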
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.597785] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.598656] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc303f1-2b36-4601-9b33-7183822c95a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.622042] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef/456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.622413] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-080a6fcf-7b6a-42c5-b7cb-76d5ffb38886 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.644097] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1084.644097] env[61898]: value = "task-1241286" [ 1084.644097] env[61898]: _type = "Task" [ 1084.644097] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.654186] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241286, 'name': ReconfigVM_Task} progress is 6%. 
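"Reconfiguring VM instance instance-0000006c to attach disk ... with type sparse" is the mirror image of the detach sketched earlier: a VirtualDeviceConfigSpec with operation 'add' pointing at an existing vmdk backing. A deliberately simplified sketch; in real code the controller key and unit number are computed from the VM's existing hardware, here they are plain parameters, and the adapter/disk-type handling is skipped:

    # Attach an existing vmdk to a VM (controller/unit selection left to the caller).
    def attach_existing_vmdk(session, vm_ref, controller_key, unit_number,
                             vmdk_path, ds_ref):
        factory = session.vim.client.factory
        backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
        backing.fileName = vmdk_path          # '[datastore2] <dir>/<disk>.vmdk'
        backing.datastore = ds_ref
        backing.diskMode = 'persistent'
        disk = factory.create('ns0:VirtualDisk')
        disk.backing = backing
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.key = -100                       # negative key: vCenter assigns the real one
        disk.capacityInKB = 0                 # size comes from the existing backing file
        change = factory.create('ns0:VirtualDeviceConfigSpec')
        change.operation = 'add'
        change.device = disk
        spec = factory.create('ns0:VirtualMachineConfigSpec')
        spec.deviceChange = [change]
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=spec)
        session.wait_for_task(task)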
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.732073] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 320577e5-f197-4f66-a94f-9b9ba2479325] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1084.785736] env[61898]: DEBUG nova.network.neutron [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.813351] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b72be2df-21bb-46e5-82c9-fd840840a5df {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.825723] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc39bb6b-9860-4809-b46f-24abbb59537a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.856999] env[61898]: DEBUG nova.compute.manager [req-08009ed7-6cbb-44dc-b708-e2470d91c343 req-eef9d9d4-70fd-4f28-9e9c-d3e4bf262be1 service nova] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Detach interface failed, port_id=630c2b2d-b17e-470f-ad5f-506c4734d40c, reason: Instance d0184b78-1525-44a4-a515-3eeb34a59cde could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1085.066495] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.156517] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241286, 'name': ReconfigVM_Task, 'duration_secs': 0.300633} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.156801] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef/456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.157552] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc357b6a-85c2-47b1-8b56-ba47b6a28510 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.167147] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1085.167147] env[61898]: value = "task-1241287" [ 1085.167147] env[61898]: _type = "Task" [ 1085.167147] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.178058] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241287, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.235889] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 45b8dc91-b577-4548-bf3a-32c7c936c616] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.289215] env[61898]: INFO nova.compute.manager [-] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Took 1.45 seconds to deallocate network for instance. [ 1085.566826] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.678959] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241287, 'name': Rename_Task, 'duration_secs': 0.152928} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.679643] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.679826] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53462d18-7193-404f-954e-0bf2af2dbd9a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.689740] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1085.689740] env[61898]: value = "task-1241288" [ 1085.689740] env[61898]: _type = "Task" [ 1085.689740] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.699342] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241288, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.740166] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: aab10d8f-0d25-4351-a627-7222be63895e] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1085.836976] env[61898]: INFO nova.compute.manager [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: d0184b78-1525-44a4-a515-3eeb34a59cde] Took 0.55 seconds to detach 1 volumes for instance. [ 1086.067619] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.203514] env[61898]: DEBUG oslo_vmware.api [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241288, 'name': PowerOnVM_Task, 'duration_secs': 0.511673} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.203839] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.204084] env[61898]: INFO nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Took 7.22 seconds to spawn the instance on the hypervisor. 
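"Powering on the VM" → PowerOnVM_Task → "Powered on", followed by the power-state check in the records that follow, closes out the 7.22-second spawn noted above. A sketch of the power-on call plus reading the resulting power state; `vim_util.get_object_property` is oslo.vmware's property reader, and `vm_ref` is assumed as before:

    # Power on a VM and read back its power state.
    from oslo_vmware import vim_util

    def power_on_and_check(session, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)
        # vSphere property path for the power state: runtime.powerState
        return session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, vm_ref, 'runtime.powerState')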
[ 1086.204290] env[61898]: DEBUG nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1086.205232] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905aa198-2284-4fcc-bdeb-019a0fd0d7e7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.244367] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1aa03975-f18f-4e64-836e-e991b73ee9d5] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.345418] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1086.345787] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.346113] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.368184] env[61898]: INFO nova.scheduler.client.report [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted allocations for instance d0184b78-1525-44a4-a515-3eeb34a59cde [ 1086.467571] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Volume attach. 
Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1086.467827] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267735', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'name': 'volume-bdeba398-0cb6-409b-9caf-e9b957918e7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f8b4a587-9ca8-4710-8cf6-3f6ea336185c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'serial': 'bdeba398-0cb6-409b-9caf-e9b957918e7d'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1086.468755] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a77980-a2e0-4dbe-91c4-1d6ecc4c39fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.486048] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23245495-6e1c-4c3e-b716-189b05c2d2a2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.509699] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] volume-bdeba398-0cb6-409b-9caf-e9b957918e7d/volume-bdeba398-0cb6-409b-9caf-e9b957918e7d.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.509953] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6af0ef6e-a9b3-4b59-aaf3-22beb06bdbac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.527421] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1086.527421] env[61898]: value = "task-1241289" [ 1086.527421] env[61898]: _type = "Task" [ 1086.527421] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.534686] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241289, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.564311] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241283, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.290551} completed successfully. 
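The recurring Acquiring lock / Lock ... acquired / Lock ... "released" triplets above (the per-instance lock around do_attach_volume, the "compute_resources" lock around update_usage) come from oslo.concurrency's lockutils. A minimal sketch of both spellings of that pattern; the lock names are the ones visible in the log, the function bodies are placeholders:

    # Two equivalent ways to serialize critical sections with oslo.concurrency.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker bookkeeping would go here

    def do_attach_volume(instance_uuid):
        # Per-instance lock keyed on the instance UUID, as in the log.
        with lockutils.lock(instance_uuid):
            pass  # attach flow would go here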
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.564549] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71/OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71.vmdk to [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk. [ 1086.564742] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Cleaning up location [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71 {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1086.564900] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_2d08cb66-c77e-4f84-9a25-17125fc4fc71 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.565175] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b909e02b-6094-4103-bb88-6427e76b3cf4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.570349] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1086.570349] env[61898]: value = "task-1241290" [ 1086.570349] env[61898]: _type = "Task" [ 1086.570349] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.577672] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241290, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.726289] env[61898]: INFO nova.compute.manager [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Took 17.33 seconds to build instance. 
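The entries above show the shape of the vmdk volume-attach path as it appears in this log: volumeops receives a connection_info dict (driver_volume_type 'vmdk', with the backing volume MoRef under data['volume'] and its UUID under data['volume_id']), issues a ReconfigVM_Task to attach the disk, and then polls the returned task until it reports completion. The sketch below is a minimal, stdlib-only illustration of that poll-until-done pattern under stated assumptions; FakeTask and wait_for_task are hypothetical stand-ins, not the oslo.vmware implementation the driver actually uses, and the connection_info values are copied from the log entries above.

```python
# Minimal sketch of the "issue task, poll until done" pattern visible in the
# ReconfigVM_Task / CopyVirtualDisk_Task entries above. FakeTask is a
# hypothetical stand-in for a vCenter task handle; the real driver goes
# through oslo_vmware.api.VMwareAPISession.wait_for_task instead.
import itertools
import time


class FakeTask:
    """Pretends to be a vCenter task that finishes after a few polls."""

    def __init__(self, name):
        self.name = name
        self._progress = itertools.chain([5, 54, 97], itertools.repeat(100))

    def poll(self):
        progress = next(self._progress)
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it succeeds, logging progress like the entries above."""
    while True:
        state, progress = task.poll()
        if state == "success":
            print(f"Task {task.name} completed successfully.")
            return
        print(f"Task {task.name} progress is {progress}%.")
        time.sleep(poll_interval)


# Shape of the connection_info handed to _attach_volume_vmdk in the log:
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-267735",  # MoRef of the backing VM for the volume, as logged
        "volume_id": "bdeba398-0cb6-409b-9caf-e9b957918e7d",
        "access_mode": "rw",
    },
}

if connection_info["driver_volume_type"] == "vmdk":
    # The real code builds a device-change spec and calls ReconfigVM_Task;
    # here we only mimic waiting on the resulting task.
    wait_for_task(FakeTask("ReconfigVM_Task"))
```

The same wait loop accounts for every task in this section; only the task name and duration change (DeleteDatastoreFile_Task, CopyVirtualDisk_Task, PowerOffVM_Task, Rename_Task, PowerOnVM_Task).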
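The recurring 'Acquiring lock ...' / 'Lock ... acquired ... waited Xs' / 'Lock ... "released" ... held Ys' triplets around these entries (for "compute_resources", per-instance locks, and the "-events" locks) are all emitted by the same wrapper in oslo.concurrency's lockutils. The snippet below is a hedged, stdlib-only mimic of that tracing pattern, not the lockutils implementation itself; the lock name and caller strings are illustrative.

```python
# Stdlib-only mimic of the "Acquiring / acquired (waited) / released (held)"
# trace that surrounds critical sections in the log. Real Nova code uses
# oslo_concurrency.lockutils; this sketch only reproduces the timing trace.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    # One shared lock object per name, like a process-local fair lock registry.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def traced_lock(name, caller):
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - start - waited
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Example: the resource tracker serializes claims and usage updates on one name.
with traced_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.01)  # stand-in for claim work
```

In the log the same wrapper also accounts for the longer holds, e.g. "compute_resources" held for roughly two seconds during an instance claim while placement inventory is refreshed.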
[ 1086.748076] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 34338563-05d4-477b-8480-6ef4cbf28e72] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1086.876179] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8b5275c5-e439-41ac-a675-30a8238984cf tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "d0184b78-1525-44a4-a515-3eeb34a59cde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.795s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.037122] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241289, 'name': ReconfigVM_Task, 'duration_secs': 0.377544} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.037416] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfigured VM instance instance-0000006a to attach disk [datastore2] volume-bdeba398-0cb6-409b-9caf-e9b957918e7d/volume-bdeba398-0cb6-409b-9caf-e9b957918e7d.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.042174] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97e02c2a-7d38-4f02-8b97-11199b6b3d91 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.056484] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1087.056484] env[61898]: value = "task-1241291" [ 1087.056484] env[61898]: _type = "Task" [ 1087.056484] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.064060] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241291, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.078672] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061551} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.078914] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.079116] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.079369] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk to [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1087.079606] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc6ca3f1-8e2a-4574-b6b0-93eed3076ca2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.085990] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1087.085990] env[61898]: value = "task-1241292" [ 1087.085990] env[61898]: _type = "Task" [ 1087.085990] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.093011] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.228764] env[61898]: DEBUG oslo_concurrency.lockutils [None req-842caf1e-61e0-478b-814c-dc54d270bd1c tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.843s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.251526] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 4c744673-0d9b-44ef-938f-372b101a2053] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1087.569511] env[61898]: DEBUG oslo_vmware.api [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241291, 'name': ReconfigVM_Task, 'duration_secs': 0.142665} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.570026] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267735', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'name': 'volume-bdeba398-0cb6-409b-9caf-e9b957918e7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f8b4a587-9ca8-4710-8cf6-3f6ea336185c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'serial': 'bdeba398-0cb6-409b-9caf-e9b957918e7d'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1087.602681] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.755708] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 1fb4535d-47d8-45c5-b6d6-d05e57237b98] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.091503] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.091759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.091975] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.092211] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.092416] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.094820] env[61898]: INFO nova.compute.manager [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Terminating instance [ 1088.101400] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.259507] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: a0580308-d25b-47cb-9c1c-adb763be7925] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.434770] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.434770] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.601332] env[61898]: DEBUG nova.compute.manager [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1088.601587] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.601960] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.602848] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbebeb7-65ea-474a-9c2a-dda4228937b3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.608394] env[61898]: DEBUG nova.objects.instance [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid f8b4a587-9ca8-4710-8cf6-3f6ea336185c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.612989] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1088.613321] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fcae12c-b5b4-48cc-8beb-ff2dfe9f47ed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.621414] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: (returnval){ [ 1088.621414] env[61898]: value = "task-1241293" [ 1088.621414] env[61898]: _type = "Task" [ 1088.621414] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.632838] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241293, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.763110] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 29eadea9-fa85-4f51-97d0-a941e1658094] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1088.937688] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1089.101424] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.113818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-8a341ce6-1b10-47f4-964e-626ff25387cc tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.254s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.133589] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241293, 'name': PowerOffVM_Task, 'duration_secs': 0.217438} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.133966] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.134057] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.134326] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f433a8f3-db4e-43e1-aab0-77d39c65f2d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.207161] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.207410] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.207602] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Deleting the datastore file [datastore2] 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.207911] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a64d3ae8-3ac3-42d1-b4f2-fcee920a4664 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.217085] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for the task: 
(returnval){ [ 1089.217085] env[61898]: value = "task-1241295" [ 1089.217085] env[61898]: _type = "Task" [ 1089.217085] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.227353] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241295, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.266906] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 9e6a3749-1974-4818-9cc6-76367d41b7e5] Instance has had 0 of 5 cleanup attempts {{(pid=61898) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11570}} [ 1089.462045] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.462405] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.464741] env[61898]: INFO nova.compute.claims [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.600975] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.727232] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241295, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.770108] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.770397] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Cleaning up deleted instances with incomplete migration {{(pid=61898) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11595}} [ 1090.098399] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241292, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.627785} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.098689] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f/e6be0329-0c19-4f26-b3b1-5ccaaf3d9b5f.vmdk to [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.099480] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352af41f-132d-4dba-8be6-02bc52e2e48f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.121159] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.121459] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b63576fd-6011-436b-b9e1-65273cb84805 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.140516] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1090.140516] env[61898]: value = "task-1241296" [ 1090.140516] env[61898]: _type = "Task" [ 1090.140516] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.148372] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241296, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.227101] env[61898]: DEBUG oslo_vmware.api [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Task: {'id': task-1241295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.840479} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.227401] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.227567] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.227745] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.227925] env[61898]: INFO nova.compute.manager [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1090.228214] env[61898]: DEBUG oslo.service.loopingcall [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.228424] env[61898]: DEBUG nova.compute.manager [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1090.228527] env[61898]: DEBUG nova.network.neutron [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.273371] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.494623] env[61898]: DEBUG nova.compute.manager [req-f9de414d-af8c-405d-9ec9-3c4337b64cff req-ac7e22e1-8c6d-4d43-a431-9ea8f5a45b52 service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Received event network-vif-deleted-adc0e331-b036-4452-8908-97777cb02434 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1090.494947] env[61898]: INFO nova.compute.manager [req-f9de414d-af8c-405d-9ec9-3c4337b64cff req-ac7e22e1-8c6d-4d43-a431-9ea8f5a45b52 service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Neutron deleted interface adc0e331-b036-4452-8908-97777cb02434; detaching it from the instance and deleting it from the info cache [ 1090.495072] env[61898]: DEBUG nova.network.neutron [req-f9de414d-af8c-405d-9ec9-3c4337b64cff req-ac7e22e1-8c6d-4d43-a431-9ea8f5a45b52 service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.551211] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667caacd-77e8-4b2b-ae0e-e98f4a638891 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.558945] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc3124d-b70d-43b1-8596-27e6799e997d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.588518] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebc43fa-ccca-4cee-af5d-500d12ad1be2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.595721] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c925f370-4168-4785-991f-d2d0d56aa3f3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.608310] env[61898]: DEBUG nova.compute.provider_tree [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.648459] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241296, 'name': ReconfigVM_Task, 'duration_secs': 0.308304} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.648722] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57/523a29df-e21d-4e38-9437-ebcdd7012f57.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1090.649512] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c1fcf56-1d49-4602-a63c-1ea946dd88e0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.655313] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1090.655313] env[61898]: value = "task-1241297" [ 1090.655313] env[61898]: _type = "Task" [ 1090.655313] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.662443] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241297, 'name': Rename_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.756583] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.756855] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.969457] env[61898]: DEBUG nova.network.neutron [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.999953] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d844ea96-c2c6-48f6-b0b6-d3a27b00de36 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.013140] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a087ba-80a9-4862-9792-e979e9453df5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.036833] env[61898]: DEBUG nova.compute.manager [req-f9de414d-af8c-405d-9ec9-3c4337b64cff req-ac7e22e1-8c6d-4d43-a431-9ea8f5a45b52 service nova] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Detach interface failed, port_id=adc0e331-b036-4452-8908-97777cb02434, reason: Instance 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1091.111784] env[61898]: DEBUG nova.scheduler.client.report [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1091.165385] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241297, 'name': Rename_Task, 'duration_secs': 0.126783} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.165662] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1091.165905] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7c06cf2-2e65-49b2-b7de-5978b4842f23 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.171871] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1091.171871] env[61898]: value = "task-1241298" [ 1091.171871] env[61898]: _type = "Task" [ 1091.171871] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.179038] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241298, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.258905] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Starting instance... {{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1091.472658] env[61898]: INFO nova.compute.manager [-] [instance: 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef] Took 1.24 seconds to deallocate network for instance. [ 1091.617161] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.155s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.617715] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1091.681996] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241298, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.783184] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.783527] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.785108] env[61898]: INFO nova.compute.claims [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.979888] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.123368] env[61898]: DEBUG nova.compute.utils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1092.124851] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Allocating IP information in the background. 
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1092.125047] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1092.164899] env[61898]: DEBUG nova.policy [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ce8ddf4b7fe4e0583f09e7f88ab5e70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '975e564bd7f442629018b97007460e00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1092.183651] env[61898]: DEBUG oslo_vmware.api [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241298, 'name': PowerOnVM_Task, 'duration_secs': 0.741841} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.184165] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1092.296130] env[61898]: DEBUG nova.compute.manager [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1092.297061] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ae2939-48aa-44de-b355-8a04b5f9b737 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.465885] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Successfully created port: 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1092.628247] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1092.818449] env[61898]: DEBUG oslo_concurrency.lockutils [None req-100996f3-dddc-488d-bc99-6defd741ede2 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.512s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1092.876038] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9eeb93-8f11-495a-ba3f-7460eed61418 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.885925] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d0ef8-2572-4adc-a42d-2963f8f8e040 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.918415] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa4f6be-bcee-4804-9a3e-a0bafbd96af7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.925924] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0e98c0-577c-4232-9714-c31630adc2fa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.939534] env[61898]: DEBUG nova.compute.provider_tree [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.443292] env[61898]: DEBUG nova.scheduler.client.report [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1093.639275] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1093.663944] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1093.664213] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1093.664377] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1093.664568] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1093.664716] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1093.664864] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1093.665080] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1093.665244] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1093.665409] env[61898]: DEBUG 
nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1093.665572] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1093.665743] env[61898]: DEBUG nova.virt.hardware [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1093.666629] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9415b5b4-fd97-4e01-8db9-3b35eb8092e9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.674489] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d56706-ad9b-4f2a-a75e-327c68bd0cb2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.834055] env[61898]: DEBUG nova.compute.manager [req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1093.834321] env[61898]: DEBUG oslo_concurrency.lockutils [req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.834550] env[61898]: DEBUG oslo_concurrency.lockutils [req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.834728] env[61898]: DEBUG oslo_concurrency.lockutils [req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.834886] env[61898]: DEBUG nova.compute.manager [req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] No waiting events found dispatching network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1093.835070] env[61898]: WARNING nova.compute.manager 
[req-d26c86f7-095c-4034-9495-626c7b51739a req-cb759e98-ccd0-4593-84dc-152b31b98b60 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received unexpected event network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f for instance with vm_state building and task_state spawning. [ 1093.948162] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.948868] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1093.951753] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.972s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.952050] env[61898]: DEBUG nova.objects.instance [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lazy-loading 'resources' on Instance uuid 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.393251] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Successfully updated port: 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1094.444974] env[61898]: DEBUG nova.compute.manager [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-changed-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1094.445218] env[61898]: DEBUG nova.compute.manager [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing instance network info cache due to event network-changed-0a571ec3-5857-4620-a92f-26e53930943f. 
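(Editorial note on the trace above.) The WARNING "Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning" is logged when Neutron delivers the external event before the compute side has registered a waiter for it; during a build this is typically benign. As a rough, simplified sketch (an assumption for illustration, not this run's code path), the driver-side pattern that does register a waiter looks roughly like the following; plug_vifs_and_wait, virtapi, driver, instance and network_info are placeholders, and wait_for_instance_event is Nova's ComputeVirtAPI helper:

def plug_vifs_and_wait(virtapi, driver, instance, network_info, deadline=300):
    # Register interest in the network-vif-plugged events *before* plugging,
    # so an event like the one popped above finds a waiter instead of
    # triggering the "Received unexpected event" warning.
    events = [('network-vif-plugged', vif['id']) for vif in network_info]
    with virtapi.wait_for_instance_event(instance, events, deadline=deadline):
        driver.plug_vifs(instance, network_info)  # event may arrive inside the block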
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1094.445345] env[61898]: DEBUG oslo_concurrency.lockutils [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.445516] env[61898]: DEBUG oslo_concurrency.lockutils [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.445705] env[61898]: DEBUG nova.network.neutron [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.456125] env[61898]: DEBUG nova.compute.utils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1094.460396] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1094.460747] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.505313] env[61898]: DEBUG nova.policy [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b53a1aca504e4b7593420e25dd8602f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a5473d225540e186d6778172a187cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1094.530715] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8908977-5b2f-4636-99cb-4bb308e302cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.538864] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22a81d1-ba7a-4f9c-a17c-a9ea8ee3788b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.571668] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b74634d-fc18-41a5-9523-c1a0b888dfc2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.579360] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ebd778-67a7-4b6f-99cb-2d3f91e70a84 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.592696] env[61898]: DEBUG nova.compute.provider_tree [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.819021] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Successfully created port: fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.896062] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1094.961293] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1094.984910] env[61898]: DEBUG nova.network.neutron [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1095.061781] env[61898]: DEBUG nova.network.neutron [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.095231] env[61898]: DEBUG nova.scheduler.client.report [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1095.565638] env[61898]: DEBUG oslo_concurrency.lockutils [req-a735cdd5-1133-4e48-ba92-8c0d22ffdc5b req-65c9a58f-0dab-41d6-809b-d7e8e1ed85d0 service nova] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.565638] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.565638] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1095.601708] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.624059] env[61898]: INFO nova.scheduler.client.report [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Deleted allocations for instance 456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef [ 1095.972035] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Start spawning the instance on the hypervisor. 
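(Editorial note on the trace above.) The repeated "Acquiring lock ... acquired ... released" DEBUG triplets, for example around "compute_resources" and the "refresh_cache-<uuid>" locks with the waited/held timings shown, are emitted by oslo_concurrency.lockutils. A minimal, hypothetical sketch of the same primitive, purely for illustration (claim_resources and instance_uuid are made-up names):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the per-process 'compute_resources' semaphore held, which is
    # what serializes instance_claim/update_usage and yields the
    # "waited N.NNNs ... held N.NNNs" timings seen in the log.
    print('claiming resources for %s' % instance_uuid)

claim_resources('f8aaed09-5c34-4b17-888c-9066711f4c5a')

# The same primitive is also available as a context manager, e.g.
# with lockutils.lock('refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63'): ...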
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1095.996255] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1095.996564] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1095.996729] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.996913] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1095.997079] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.997237] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1095.997474] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1095.997602] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1095.997770] 
env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1095.997936] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1095.998126] env[61898]: DEBUG nova.virt.hardware [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1095.999089] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3068eb-831f-41fe-9133-98a088e495b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.007162] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca938f6a-8a72-4f48-96b9-400c9c254ca7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.096526] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance cache missing network info. 
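(Editorial note on the trace above.) The CPU topology lines ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") boil down to enumerating every (sockets, cores, threads) combination within the flavor/image maxima whose product equals the requested vCPU count. A simplified, stand-alone sketch, not the actual nova/virt/hardware.py code (which also handles thread policies and preference ordering); the small maxima in the usage line are chosen for brevity, the log's limits are 65536 each:

import itertools
from typing import NamedTuple

class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Keep only combinations that exactly account for the requested vCPUs.
    for s, c, t in itertools.product(range(1, max_sockets + 1),
                                     range(1, max_cores + 1),
                                     range(1, max_threads + 1)):
        if s * c * t == vcpus:
            yield Topology(sockets=s, cores=c, threads=t)

# For the 1-vCPU m1.nano flavor there is a single candidate, matching the log:
print(list(possible_topologies(1, max_sockets=8, max_cores=8, max_threads=2)))
# -> [Topology(sockets=1, cores=1, threads=1)]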
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1096.136472] env[61898]: DEBUG oslo_concurrency.lockutils [None req-e9fa6f98-d9d6-4791-9113-a0aae8afde53 tempest-ServerAddressesTestJSON-368889151 tempest-ServerAddressesTestJSON-368889151-project-member] Lock "456f3b8e-8f5d-4e17-b2e3-15f76b01e0ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.045s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.232802] env[61898]: DEBUG nova.network.neutron [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.735618] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.736042] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance network_info: |[{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1096.736836] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:8b:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0a571ec3-5857-4620-a92f-26e53930943f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1096.744456] env[61898]: DEBUG oslo.service.loopingcall [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1096.744700] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1096.744929] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-127ff4e2-87e1-4746-b834-7544a758a17b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.766803] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1096.766803] env[61898]: value = "task-1241299" [ 1096.766803] env[61898]: _type = "Task" [ 1096.766803] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.774792] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241299, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.790340] env[61898]: DEBUG nova.compute.manager [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Received event network-vif-plugged-fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1096.790601] env[61898]: DEBUG oslo_concurrency.lockutils [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] Acquiring lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.790817] env[61898]: DEBUG oslo_concurrency.lockutils [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.790988] env[61898]: DEBUG oslo_concurrency.lockutils [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.791187] env[61898]: DEBUG nova.compute.manager [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] No waiting events found dispatching network-vif-plugged-fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.791436] env[61898]: WARNING nova.compute.manager [req-b601d0c9-d216-40f7-bc78-6763ce339ffb req-5356935c-d0ae-41fc-b92a-bb16ceb8bbd8 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Received unexpected event network-vif-plugged-fee847b9-7507-43d1-8739-5163f56c1ce9 for instance with vm_state building and task_state spawning. [ 1097.277602] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241299, 'name': CreateVM_Task, 'duration_secs': 0.299051} completed successfully. 
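(Editorial note on the trace above.) The CreateVM_Task sequence (Invoking Folder.CreateVM_Task, "Waiting for the task", repeated "progress is 0%" polls, then "completed successfully") is the standard oslo.vmware task pattern: invoke the SOAP method, receive a Task managed-object reference, and poll it until it reaches a terminal state. A hedged sketch assuming an already-created VMwareAPISession; create_vm, folder_ref, config_spec and pool_ref are placeholders, not values from this run:

from oslo_vmware import api as vmware_api

def create_vm(session: vmware_api.VMwareAPISession, folder_ref, config_spec, pool_ref):
    # invoke_api() issues Folder.CreateVM_Task and returns the Task moref
    # that the log identifies as e.g. 'task-1241299'.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                  folder_ref, config=config_spec, pool=pool_ref)
    # wait_for_task() polls the task at the session's poll interval, which is
    # what produces the "Task: {...} progress is N%" DEBUG lines, and returns
    # the task info once the task succeeds (raising on error).
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # moref of the new VM

The later SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries in this log follow the same invoke-then-wait shape.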
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.277775] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1097.278532] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.280143] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.280143] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1097.280143] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23e20b05-fc4d-4a31-80d1-fe9c7b0d1218 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.283725] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1097.283725] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d28084-7e90-8590-f40b-26bcd29d4122" [ 1097.283725] env[61898]: _type = "Task" [ 1097.283725] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.291144] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d28084-7e90-8590-f40b-26bcd29d4122, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.367559] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Successfully updated port: fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1097.394434] env[61898]: DEBUG nova.compute.manager [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Received event network-changed-fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1097.394709] env[61898]: DEBUG nova.compute.manager [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Refreshing instance network info cache due to event network-changed-fee847b9-7507-43d1-8739-5163f56c1ce9. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1097.394831] env[61898]: DEBUG oslo_concurrency.lockutils [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] Acquiring lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.394998] env[61898]: DEBUG oslo_concurrency.lockutils [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] Acquired lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.395185] env[61898]: DEBUG nova.network.neutron [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Refreshing network info cache for port fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1097.793978] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d28084-7e90-8590-f40b-26bcd29d4122, 'name': SearchDatastore_Task, 'duration_secs': 0.010217} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.794199] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.794438] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1097.794693] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.794821] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.796036] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1097.796036] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46ab4749-374a-4678-9c2d-7b34ea61c6f4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.804059] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1097.804157] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1097.804865] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5300dedd-97ff-4d70-a224-e4394f25fdd9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.817252] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1097.817252] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52f040a2-759f-f870-d832-5c260f81dee1" [ 1097.817252] env[61898]: _type = "Task" [ 1097.817252] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.827491] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52f040a2-759f-f870-d832-5c260f81dee1, 'name': SearchDatastore_Task, 'duration_secs': 0.009009} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.828269] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75c70d16-2059-48d7-917b-8e10d8078d9b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.833040] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1097.833040] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c3f0-b016-328a-c8af-8d3bf1943031" [ 1097.833040] env[61898]: _type = "Task" [ 1097.833040] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.841535] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c3f0-b016-328a-c8af-8d3bf1943031, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.870339] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1097.930679] env[61898]: DEBUG nova.network.neutron [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1098.014055] env[61898]: DEBUG nova.network.neutron [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.345150] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5221c3f0-b016-328a-c8af-8d3bf1943031, 'name': SearchDatastore_Task, 'duration_secs': 0.007966} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.345759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.345759] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1098.345932] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6316e9f3-2225-41ea-b028-1e60d78260f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.352774] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1098.352774] env[61898]: value = "task-1241300" [ 1098.352774] env[61898]: _type = "Task" [ 1098.352774] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.360166] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241300, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.518962] env[61898]: DEBUG oslo_concurrency.lockutils [req-01c4025d-294e-4eac-8182-0eab4c339265 req-3e6a06a7-e35d-4a10-92a4-c5745f2dcc8a service nova] Releasing lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.519355] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.519542] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1098.770104] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.770104] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.861945] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241300, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430556} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.862296] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1098.862426] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1098.862663] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c697d7e8-4f51-415a-aad6-bbf8e42bdd83 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.868628] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1098.868628] env[61898]: value = "task-1241301" [ 1098.868628] env[61898]: _type = "Task" [ 1098.868628] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.876121] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241301, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.052136] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1099.179758] env[61898]: DEBUG nova.network.neutron [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updating instance_info_cache with network_info: [{"id": "fee847b9-7507-43d1-8739-5163f56c1ce9", "address": "fa:16:3e:fe:8c:74", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee847b9-75", "ovs_interfaceid": "fee847b9-7507-43d1-8739-5163f56c1ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.274713] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1099.274913] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1099.274982] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1099.378184] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241301, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068574} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.378435] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1099.379196] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d9f354-4dda-42c7-b8f3-da539ce84816 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.400729] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1099.401024] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6f60ca5-2d0b-4de9-bc4d-ded8b04fc77c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.420123] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1099.420123] env[61898]: value = "task-1241302" [ 1099.420123] env[61898]: _type = "Task" [ 1099.420123] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.427463] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241302, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.682796] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.683089] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Instance network_info: |[{"id": "fee847b9-7507-43d1-8739-5163f56c1ce9", "address": "fa:16:3e:fe:8c:74", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee847b9-75", "ovs_interfaceid": "fee847b9-7507-43d1-8739-5163f56c1ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1099.683579] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:8c:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '418ddd3d-5f64-407e-8e0c-c8b81639bee9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fee847b9-7507-43d1-8739-5163f56c1ce9', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.690999] env[61898]: DEBUG oslo.service.loopingcall [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1099.691254] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1099.691482] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3260f601-507b-43e7-8884-828ad3499610 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.710748] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.710748] env[61898]: value = "task-1241303" [ 1099.710748] env[61898]: _type = "Task" [ 1099.710748] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.717705] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241303, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.778503] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 1099.778696] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Skipping network cache update for instance because it is Building. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10270}} [ 1099.806360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.806530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.806711] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1099.806921] env[61898]: DEBUG nova.objects.instance [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lazy-loading 'info_cache' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.932420] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241302, 'name': ReconfigVM_Task, 'duration_secs': 0.465547} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.932788] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1099.933388] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc20d81a-629b-45f3-9549-32b3eab713d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.939990] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1099.939990] env[61898]: value = "task-1241304" [ 1099.939990] env[61898]: _type = "Task" [ 1099.939990] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.948566] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241304, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.220929] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241303, 'name': CreateVM_Task, 'duration_secs': 0.398572} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.221120] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.221839] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.222034] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.222361] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1100.222620] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e99846ae-d393-4979-958c-15b16d022a7a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.226812] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1100.226812] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]527873be-87bd-92a4-3c67-4fc17cf35f63" [ 1100.226812] env[61898]: _type = "Task" [ 1100.226812] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.235094] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527873be-87bd-92a4-3c67-4fc17cf35f63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.450421] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241304, 'name': Rename_Task, 'duration_secs': 0.27133} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.450842] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1100.451059] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a6da07a-3c86-41ad-924b-134024078be3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.456704] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1100.456704] env[61898]: value = "task-1241305" [ 1100.456704] env[61898]: _type = "Task" [ 1100.456704] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.463961] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241305, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.737776] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]527873be-87bd-92a4-3c67-4fc17cf35f63, 'name': SearchDatastore_Task, 'duration_secs': 0.010045} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.738315] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.738744] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1100.739132] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.739512] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.739815] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1100.740185] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c1c3d17-46d8-4da2-9993-baaa08d30719 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.748218] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1100.748321] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1100.748948] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-627b6142-d33b-481c-9db4-19f2d71ff14f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.753654] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1100.753654] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bca98-340c-7c0c-f285-81ca9e17aae0" [ 1100.753654] env[61898]: _type = "Task" [ 1100.753654] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.760732] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525bca98-340c-7c0c-f285-81ca9e17aae0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.966575] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241305, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.263878] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]525bca98-340c-7c0c-f285-81ca9e17aae0, 'name': SearchDatastore_Task, 'duration_secs': 0.009728} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.264714] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce4cf69b-2c10-43bf-8eaa-5c4bcd3a87a0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.269775] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1101.269775] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5230e601-6094-140c-8890-0f8134c3b2b7" [ 1101.269775] env[61898]: _type = "Task" [ 1101.269775] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.277946] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5230e601-6094-140c-8890-0f8134c3b2b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.469748] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241305, 'name': PowerOnVM_Task} progress is 66%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.527975] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [{"id": "fc927434-188b-4c42-9200-bcb870385a25", "address": "fa:16:3e:f0:b0:60", "network": {"id": "fd85e8a1-e7ca-494c-b5a7-2514e7bce7b2", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1542774828-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6198f817d1b471483500fe05c9bef3f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4f4d0bb-61eb-4597-bc00-c9fdbc85f93d", "external-id": "nsx-vlan-transportzone-470", "segmentation_id": 470, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc927434-18", "ovs_interfaceid": "fc927434-188b-4c42-9200-bcb870385a25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.780507] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5230e601-6094-140c-8890-0f8134c3b2b7, 'name': SearchDatastore_Task, 'duration_secs': 0.008896} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.780748] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.781029] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] f8aaed09-5c34-4b17-888c-9066711f4c5a/f8aaed09-5c34-4b17-888c-9066711f4c5a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1101.781291] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3d94981-c215-4793-9e66-6554be8d0bd0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.789108] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1101.789108] env[61898]: value = "task-1241306" [ 1101.789108] env[61898]: _type = "Task" [ 1101.789108] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.796784] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.970547] env[61898]: DEBUG oslo_vmware.api [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241305, 'name': PowerOnVM_Task, 'duration_secs': 1.160839} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.971016] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.971242] env[61898]: INFO nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Took 8.33 seconds to spawn the instance on the hypervisor. 
[ 1101.971298] env[61898]: DEBUG nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1101.972184] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96388114-e76c-4ddf-8012-fce4f430fddf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.031918] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-523a29df-e21d-4e38-9437-ebcdd7012f57" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1102.031918] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1102.031918] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.298483] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241306, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.417805} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.300043] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore1] f8aaed09-5c34-4b17-888c-9066711f4c5a/f8aaed09-5c34-4b17-888c-9066711f4c5a.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1102.300043] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1102.300043] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e480807b-3fc0-4a7d-960f-59be814ffe75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.305269] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1102.305269] env[61898]: value = "task-1241307" [ 1102.305269] env[61898]: _type = "Task" [ 1102.305269] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.312979] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.489519] env[61898]: INFO nova.compute.manager [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Took 13.05 seconds to build instance. [ 1102.815065] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057791} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.815631] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.816513] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a700ec-ba08-4221-a4cd-962f96888c23 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.838862] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] f8aaed09-5c34-4b17-888c-9066711f4c5a/f8aaed09-5c34-4b17-888c-9066711f4c5a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.839360] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7971fde-3324-4de0-93a1-42877f8d284c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.859228] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1102.859228] env[61898]: value = "task-1241308" [ 1102.859228] env[61898]: _type = "Task" [ 1102.859228] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.866879] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241308, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.889259] env[61898]: DEBUG nova.compute.manager [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-changed-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1102.889460] env[61898]: DEBUG nova.compute.manager [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing instance network info cache due to event network-changed-0a571ec3-5857-4620-a92f-26e53930943f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1102.889719] env[61898]: DEBUG oslo_concurrency.lockutils [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.889895] env[61898]: DEBUG oslo_concurrency.lockutils [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.890127] env[61898]: DEBUG nova.network.neutron [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1102.992090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-78d09d31-e635-4d6c-b1c8-7c78a7c6f95a tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.557s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.368467] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241308, 'name': ReconfigVM_Task, 'duration_secs': 0.494166} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.368812] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Reconfigured VM instance instance-0000006e to attach disk [datastore1] f8aaed09-5c34-4b17-888c-9066711f4c5a/f8aaed09-5c34-4b17-888c-9066711f4c5a.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.369453] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ceab7cd4-0413-4432-ab31-041dda0f1e80 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.375139] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1103.375139] env[61898]: value = "task-1241309" [ 1103.375139] env[61898]: _type = "Task" [ 1103.375139] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.385950] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241309, 'name': Rename_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.606456] env[61898]: DEBUG nova.network.neutron [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updated VIF entry in instance network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1103.606864] env[61898]: DEBUG nova.network.neutron [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.884343] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241309, 'name': Rename_Task, 'duration_secs': 0.136856} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.884608] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.884847] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-587098e5-2e12-4a7f-a817-7afc6cf1894c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.890866] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1103.890866] env[61898]: value = "task-1241310" [ 1103.890866] env[61898]: _type = "Task" [ 1103.890866] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.897692] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241310, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.109518] env[61898]: DEBUG oslo_concurrency.lockutils [req-3fdf78c1-3033-4b2c-bb96-02c822424bb1 req-ddd922d0-afc7-460a-9426-880cb8896bee service nova] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.401014] env[61898]: DEBUG oslo_vmware.api [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241310, 'name': PowerOnVM_Task, 'duration_secs': 0.418739} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.401312] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1104.401526] env[61898]: INFO nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Took 8.43 seconds to spawn the instance on the hypervisor. [ 1104.401776] env[61898]: DEBUG nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1104.402567] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330c8479-209d-44f1-bf49-581cdbddf813 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.923136] env[61898]: INFO nova.compute.manager [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Took 13.16 seconds to build instance. [ 1105.180539] env[61898]: DEBUG nova.compute.manager [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Received event network-changed-fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1105.180802] env[61898]: DEBUG nova.compute.manager [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Refreshing instance network info cache due to event network-changed-fee847b9-7507-43d1-8739-5163f56c1ce9. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1105.180993] env[61898]: DEBUG oslo_concurrency.lockutils [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] Acquiring lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1105.181156] env[61898]: DEBUG oslo_concurrency.lockutils [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] Acquired lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.181321] env[61898]: DEBUG nova.network.neutron [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Refreshing network info cache for port fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1105.423634] env[61898]: DEBUG oslo_concurrency.lockutils [None req-015d158f-ad2f-4298-9c98-be20993a70a4 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.667s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1105.890082] env[61898]: DEBUG nova.network.neutron [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updated VIF entry in instance network info cache for port fee847b9-7507-43d1-8739-5163f56c1ce9. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.890462] env[61898]: DEBUG nova.network.neutron [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updating instance_info_cache with network_info: [{"id": "fee847b9-7507-43d1-8739-5163f56c1ce9", "address": "fa:16:3e:fe:8c:74", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfee847b9-75", "ovs_interfaceid": "fee847b9-7507-43d1-8739-5163f56c1ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.393688] env[61898]: DEBUG oslo_concurrency.lockutils [req-d483a972-16cf-4f22-a000-83e951809d1e req-943e021e-601c-4b81-ae31-be0081de190c service nova] Releasing lock "refresh_cache-f8aaed09-5c34-4b17-888c-9066711f4c5a" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.594544] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.594352] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.594616] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.098313] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.098564] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1117.098729] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.098888] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1117.099829] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32005a00-4d02-4565-b57d-e1f4a63ec1e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.108187] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2b86df-7221-4f21-966a-48387e96169f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.121915] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46a278e-f530-47ca-a228-6809ee44fec0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.127904] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4651082d-099f-4e52-9096-5237b743da16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.155686] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181292MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1117.155837] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1117.156017] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1118.182414] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance f8b4a587-9ca8-4710-8cf6-3f6ea336185c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1118.182717] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 523a29df-e21d-4e38-9437-ebcdd7012f57 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1118.182717] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 06f718b8-2433-4eb5-8a62-9e4c79e78e63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1118.182805] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance f8aaed09-5c34-4b17-888c-9066711f4c5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1118.182963] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1118.183119] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1118.234805] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3eeda1f-f12a-4d49-8cfe-133bb0a37b47 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.242099] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b864643-c1b0-41f4-8ee1-cb7a7b759487 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.271747] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84b4f5e-6313-4e49-b654-9fe18c4f5aa8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.278378] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028c6095-826b-4ef7-b36c-306bfd8c7cfa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.292133] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.811490] env[61898]: ERROR nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [req-735b3743-49a2-4928-a5be-8b637ad2495a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 79886f75-94e9-4bf0-9cbd-87f3715d3144. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-735b3743-49a2-4928-a5be-8b637ad2495a"}]} [ 1118.826821] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing inventories for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1118.839146] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating ProviderTree inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1118.839368] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1118.850659] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing aggregate associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, aggregates: None {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1118.866404] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Refreshing trait associations for resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=61898) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1118.914530] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bafd6c-04c0-41c4-86f3-1d34649a0cae {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.921601] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b490c74-de52-40a5-8260-2f3d9ade135e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.950686] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dde870a-8b6b-484c-b18c-90a860465465 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.957570] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3ca7bd-8fa3-4679-99a1-ddbaf276b5fb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.970089] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1119.498576] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 141 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1119.498867] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 141 to 142 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1119.498969] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1120.003424] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1120.003657] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.848s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.004937] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.005239] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.005404] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1121.584991] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1121.585212] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1121.585365] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1122.959929] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [{"id": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "address": "fa:16:3e:24:f1:a9", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd9582d2-67", "ovs_interfaceid": "bd9582d2-676b-45d5-be6a-1883dd40c2ff", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.463046] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-f8b4a587-9ca8-4710-8cf6-3f6ea336185c" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1123.463294] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1123.463550] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.463772] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.463933] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.464096] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.464228] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1126.079508] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1126.583025] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Getting list of instances from cluster (obj){ [ 1126.583025] env[61898]: value = "domain-c8" [ 1126.583025] env[61898]: _type = "ClusterComputeResource" [ 1126.583025] env[61898]: } {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1126.584143] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39eaa21b-b88a-4998-88d9-bb7ce4b36d2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.597299] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Got total of 4 instances {{(pid=61898) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1126.597488] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1126.597684] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid f8b4a587-9ca8-4710-8cf6-3f6ea336185c {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1126.597843] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1126.597994] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Triggering sync for uuid f8aaed09-5c34-4b17-888c-9066711f4c5a {{(pid=61898) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10667}} [ 1126.598320] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.598543] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.598792] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.598978] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock 
"f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.599228] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.599431] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.599669] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1126.599854] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.600636] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405436ae-58d5-43c7-a73c-2f3e3d8601bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.603523] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d389b57c-4fd0-4d55-ac1d-716f9672d017 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.606084] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102fbc79-10bb-48c4-8ad4-a2b04e98ecdb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.609898] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e593552-11fa-47a8-ab5e-456d65e9d696 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.124520] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.525s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.124990] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.526s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.125243] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.526s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1127.125547] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.526s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.011878] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.012360] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.012426] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.012637] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.012818] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1129.016050] env[61898]: INFO nova.compute.manager [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 
tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Terminating instance [ 1129.519435] env[61898]: DEBUG nova.compute.manager [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1129.519692] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.520622] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743add29-a359-4106-b371-e950433c8f02 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.529941] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.530203] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9da6f4c3-bf4f-474f-af44-c4ab2730de79 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.536353] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1129.536353] env[61898]: value = "task-1241311" [ 1129.536353] env[61898]: _type = "Task" [ 1129.536353] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.543737] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241311, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.046129] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241311, 'name': PowerOffVM_Task, 'duration_secs': 0.179129} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.046501] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1130.046668] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1130.046818] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4160fc4e-bfaa-4d69-94c1-8a88b4004f4e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.103657] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1130.103937] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1130.104117] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleting the datastore file [datastore2] 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1130.104393] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fb31d34-7f8e-40b1-982e-78a2b81f1aef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.110273] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for the task: (returnval){ [ 1130.110273] env[61898]: value = "task-1241313" [ 1130.110273] env[61898]: _type = "Task" [ 1130.110273] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.117274] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241313, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.619737] env[61898]: DEBUG oslo_vmware.api [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Task: {'id': task-1241313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141881} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.620026] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.620204] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.620383] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.620560] env[61898]: INFO nova.compute.manager [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1130.620797] env[61898]: DEBUG oslo.service.loopingcall [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1130.620993] env[61898]: DEBUG nova.compute.manager [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1130.621149] env[61898]: DEBUG nova.network.neutron [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1131.131236] env[61898]: DEBUG nova.compute.manager [req-7d058f61-4657-4e9b-b29f-c86394d685a5 req-51377898-3d3b-4115-87b0-5e2ec93bb57a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Received event network-vif-deleted-fc927434-188b-4c42-9200-bcb870385a25 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1131.131475] env[61898]: INFO nova.compute.manager [req-7d058f61-4657-4e9b-b29f-c86394d685a5 req-51377898-3d3b-4115-87b0-5e2ec93bb57a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Neutron deleted interface fc927434-188b-4c42-9200-bcb870385a25; detaching it from the instance and deleting it from the info cache [ 1131.131635] env[61898]: DEBUG nova.network.neutron [req-7d058f61-4657-4e9b-b29f-c86394d685a5 req-51377898-3d3b-4115-87b0-5e2ec93bb57a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.573884] env[61898]: DEBUG nova.network.neutron [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.634037] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbaf3685-25b3-42be-a853-69ce57ad0479 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.645066] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd0c5c4-4988-4e33-863a-ee59e81dec1b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.669651] env[61898]: DEBUG nova.compute.manager [req-7d058f61-4657-4e9b-b29f-c86394d685a5 req-51377898-3d3b-4115-87b0-5e2ec93bb57a service nova] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Detach interface failed, port_id=fc927434-188b-4c42-9200-bcb870385a25, reason: Instance 523a29df-e21d-4e38-9437-ebcdd7012f57 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1132.077423] env[61898]: INFO nova.compute.manager [-] [instance: 523a29df-e21d-4e38-9437-ebcdd7012f57] Took 1.46 seconds to deallocate network for instance. 
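The PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: invoke the vSphere method, get back a task moref, then poll it until completion (the wait_for_task / _poll_task lines). A minimal sketch of that pattern, using placeholder vCenter credentials and a hypothetical VM moref rather than anything from this deployment:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection settings; a real deployment reads these from nova.conf.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed-object reference; Nova's vmops resolves the real one per instance.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api issues the vSphere call and returns a Task moref...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ...which wait_for_task polls until it reports success or failure,
    # producing progress lines like the _poll_task entries in this log.
    session.wait_for_task(task)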
[ 1132.584817] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.585202] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.585398] env[61898]: DEBUG nova.objects.instance [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lazy-loading 'resources' on Instance uuid 523a29df-e21d-4e38-9437-ebcdd7012f57 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.146936] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be09149-2719-4e86-9a80-ba480d3f16ee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.154480] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ff5734-a710-4d4d-af3f-06db5243190f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.186168] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a1150d4-d17c-4624-942f-457546b1e223 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.193324] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a89af8c-43e9-4ff1-9b20-41482a619fa7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.206485] env[61898]: DEBUG nova.compute.provider_tree [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1133.735443] env[61898]: DEBUG nova.scheduler.client.report [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updated inventory for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1133.735760] env[61898]: DEBUG nova.compute.provider_tree [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating resource provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 generation from 142 to 143 during operation: update_inventory {{(pid=61898) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1133.735898] env[61898]: DEBUG nova.compute.provider_tree [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Updating inventory in ProviderTree for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1134.241057] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.655s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1134.261444] env[61898]: INFO nova.scheduler.client.report [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Deleted allocations for instance 523a29df-e21d-4e38-9437-ebcdd7012f57 [ 1134.771443] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1f99bb18-effa-48a6-8733-e4f84d785120 tempest-AttachVolumeShelveTestJSON-656296135 tempest-AttachVolumeShelveTestJSON-656296135-project-member] Lock "523a29df-e21d-4e38-9437-ebcdd7012f57" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.759s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.228049] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.228049] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1139.228049] env[61898]: INFO nova.compute.manager [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Shelving [ 1140.239798] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.240606] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b68e1bda-c7c5-4967-b205-e0c23d464f54 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.247033] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1140.247033] env[61898]: value = "task-1241315" [ 1140.247033] env[61898]: _type = "Task" [ 1140.247033] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.255092] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.758034] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241315, 'name': PowerOffVM_Task, 'duration_secs': 0.171632} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.758034] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1140.758471] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c731eb66-345b-4427-929d-555678612a99 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.777663] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e790f551-da03-47f9-a8bf-a06d9cdac984 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.287921] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Creating Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1141.288286] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1d37ce54-7aa6-4027-9ca2-788d95686be6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.296162] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1141.296162] env[61898]: value = "task-1241316" [ 1141.296162] env[61898]: _type = "Task" [ 1141.296162] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.304947] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241316, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.806458] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241316, 'name': CreateSnapshot_Task, 'duration_secs': 0.419572} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.806837] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Created Snapshot of the VM instance {{(pid=61898) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1141.807567] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc91d22a-f4f4-4404-a0ec-91372d15bbb9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.324912] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Creating linked-clone VM from snapshot {{(pid=61898) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1142.325287] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-138cab2d-be77-4993-b439-5f8fb63766f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.333499] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1142.333499] env[61898]: value = "task-1241317" [ 1142.333499] env[61898]: _type = "Task" [ 1142.333499] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.341219] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241317, 'name': CloneVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.844496] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241317, 'name': CloneVM_Task} progress is 94%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.283400] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.283741] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.284459] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.284459] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.284459] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.286939] env[61898]: INFO nova.compute.manager [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Terminating instance [ 1143.344420] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241317, 'name': CloneVM_Task} progress is 95%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.791082] env[61898]: DEBUG nova.compute.manager [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1143.791346] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.792316] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9629dd6-9ef2-4f23-bf65-ae6d01ffbccb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.800084] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.800321] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba910082-d229-4810-91ba-1be2762075f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.806050] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1143.806050] env[61898]: value = "task-1241318" [ 1143.806050] env[61898]: _type = "Task" [ 1143.806050] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.813474] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.843572] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241317, 'name': CloneVM_Task, 'duration_secs': 1.110107} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.843847] env[61898]: INFO nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Created linked-clone VM from snapshot [ 1143.844600] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18198be7-d7c3-423a-a478-2a97f94d2087 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.851488] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Uploading image 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1143.877010] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1143.877010] env[61898]: value = "vm-267739" [ 1143.877010] env[61898]: _type = "VirtualMachine" [ 1143.877010] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1143.877296] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6cc50152-360b-4191-87ae-a5daa8d140a3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.884553] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease: (returnval){ [ 1143.884553] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250507c-430c-d9b6-76be-4f7c528cea58" [ 1143.884553] env[61898]: _type = "HttpNfcLease" [ 1143.884553] env[61898]: } obtained for exporting VM: (result){ [ 1143.884553] env[61898]: value = "vm-267739" [ 1143.884553] env[61898]: _type = "VirtualMachine" [ 1143.884553] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1143.884825] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the lease: (returnval){ [ 1143.884825] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250507c-430c-d9b6-76be-4f7c528cea58" [ 1143.884825] env[61898]: _type = "HttpNfcLease" [ 1143.884825] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1143.890927] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1143.890927] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250507c-430c-d9b6-76be-4f7c528cea58" [ 1143.890927] env[61898]: _type = "HttpNfcLease" [ 1143.890927] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1144.315594] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241318, 'name': PowerOffVM_Task, 'duration_secs': 0.209463} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.315881] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.316068] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.316312] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2499ef29-0da6-4f98-b095-29b555261275 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.392674] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.392674] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250507c-430c-d9b6-76be-4f7c528cea58" [ 1144.392674] env[61898]: _type = "HttpNfcLease" [ 1144.392674] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1144.392977] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1144.392977] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5250507c-430c-d9b6-76be-4f7c528cea58" [ 1144.392977] env[61898]: _type = "HttpNfcLease" [ 1144.392977] env[61898]: }. {{(pid=61898) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1144.393675] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b2c10c-8f04-4ae9-bbb0-28895bfc5a0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.400468] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk from lease info. {{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1144.400644] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk for reading. 
{{(pid=61898) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1144.457850] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.458101] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.458303] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleting the datastore file [datastore1] f8aaed09-5c34-4b17-888c-9066711f4c5a {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.458570] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-393213e1-a378-445a-9acb-9a21d0d11ac1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.465765] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1144.465765] env[61898]: value = "task-1241321" [ 1144.465765] env[61898]: _type = "Task" [ 1144.465765] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.473657] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241321, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.492953] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fd38eede-aca9-4093-bfe3-2adf130b23c2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.975903] env[61898]: DEBUG oslo_vmware.api [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145097} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.976255] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.976514] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.976743] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.977047] env[61898]: INFO nova.compute.manager [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1144.977311] env[61898]: DEBUG oslo.service.loopingcall [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.977579] env[61898]: DEBUG nova.compute.manager [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1144.977744] env[61898]: DEBUG nova.network.neutron [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1145.459206] env[61898]: DEBUG nova.compute.manager [req-7f2a558e-3dc4-4c09-a6ad-6293d4c204ed req-8b2c840c-e526-4d16-89e5-a2d73b06c1e0 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Received event network-vif-deleted-fee847b9-7507-43d1-8739-5163f56c1ce9 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1145.459565] env[61898]: INFO nova.compute.manager [req-7f2a558e-3dc4-4c09-a6ad-6293d4c204ed req-8b2c840c-e526-4d16-89e5-a2d73b06c1e0 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Neutron deleted interface fee847b9-7507-43d1-8739-5163f56c1ce9; detaching it from the instance and deleting it from the info cache [ 1145.459940] env[61898]: DEBUG nova.network.neutron [req-7f2a558e-3dc4-4c09-a6ad-6293d4c204ed req-8b2c840c-e526-4d16-89e5-a2d73b06c1e0 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.935646] env[61898]: DEBUG nova.network.neutron [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.962380] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06f1213b-da46-46bd-82c7-5e89da10cd7f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.972161] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c9b03a-7fb0-4270-8357-8aa8e0b14357 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.996696] env[61898]: DEBUG nova.compute.manager [req-7f2a558e-3dc4-4c09-a6ad-6293d4c204ed req-8b2c840c-e526-4d16-89e5-a2d73b06c1e0 service nova] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Detach interface failed, port_id=fee847b9-7507-43d1-8739-5163f56c1ce9, reason: Instance f8aaed09-5c34-4b17-888c-9066711f4c5a could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1146.438977] env[61898]: INFO nova.compute.manager [-] [instance: f8aaed09-5c34-4b17-888c-9066711f4c5a] Took 1.46 seconds to deallocate network for instance. 
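The recurring "Acquiring lock ... / acquired ... waited / released ... held" triplets in this trace come from oslo.concurrency's lock helpers; the "inner" function named in those lines is the wrapper that lockutils.synchronized places around the decorated method. A minimal sketch of that decorator, with the lock name taken from the trace and a hypothetical body:

    from oslo_concurrency import lockutils

    # Hypothetical stand-in for resource-tracker style bookkeeping; this is not
    # Nova's code, only the same locking pattern seen in the log lines above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs only while the "compute_resources" semaphore is held; the wrapper
        # logs the acquire / held / released debug messages around this call.
        pass

    update_usage()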
[ 1146.946074] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.946488] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.946597] env[61898]: DEBUG nova.objects.instance [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'resources' on Instance uuid f8aaed09-5c34-4b17-888c-9066711f4c5a {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.503491] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a3fbcb-6720-47c6-a0c8-419cdb89f095 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.511495] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e46fe8-0813-41d4-8835-8da501e9f2b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.540999] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a56a46-4787-4281-bd39-7087b59f3811 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.548733] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d265316e-77b2-4d9d-a357-198ec37ee209 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.563625] env[61898]: DEBUG nova.compute.provider_tree [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.066732] env[61898]: DEBUG nova.scheduler.client.report [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1148.571960] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 
tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.591532] env[61898]: INFO nova.scheduler.client.report [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted allocations for instance f8aaed09-5c34-4b17-888c-9066711f4c5a [ 1149.099798] env[61898]: DEBUG oslo_concurrency.lockutils [None req-d3b06e12-f513-4495-9033-660ed3764358 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8aaed09-5c34-4b17-888c-9066711f4c5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.816s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.537356] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.537765] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.040571] env[61898]: INFO nova.compute.manager [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Detaching volume bdeba398-0cb6-409b-9caf-e9b957918e7d [ 1151.073658] env[61898]: INFO nova.virt.block_device [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Attempting to driver detach volume bdeba398-0cb6-409b-9caf-e9b957918e7d from mountpoint /dev/sdb [ 1151.074036] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1151.074322] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267735', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'name': 'volume-bdeba398-0cb6-409b-9caf-e9b957918e7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f8b4a587-9ca8-4710-8cf6-3f6ea336185c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'serial': 'bdeba398-0cb6-409b-9caf-e9b957918e7d'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1151.075559] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4993f256-289f-4a3f-a1a7-db98263182e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.100774] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa5074f-89fb-4ea7-92ec-569d89742ecb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.108280] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ab2663-0aee-43e0-af53-d866896faaf2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.133294] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9c1fdc-2bf9-441e-bf83-5b3454ced2f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.148740] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] The volume has not been displaced from its original location: [datastore2] volume-bdeba398-0cb6-409b-9caf-e9b957918e7d/volume-bdeba398-0cb6-409b-9caf-e9b957918e7d.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1151.154098] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1151.154429] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f03020db-ccbb-46d3-9f39-00842ca5f9d1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.172549] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1151.172549] env[61898]: value = "task-1241322" [ 1151.172549] env[61898]: _type = "Task" [ 1151.172549] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.180662] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.681791] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241322, 'name': ReconfigVM_Task, 'duration_secs': 0.227437} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.682288] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1151.686858] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a645193-82be-411c-a307-3c23633deb2a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.702826] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1151.702826] env[61898]: value = "task-1241323" [ 1151.702826] env[61898]: _type = "Task" [ 1151.702826] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.711172] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241323, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.212217] env[61898]: DEBUG oslo_vmware.api [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241323, 'name': ReconfigVM_Task, 'duration_secs': 0.200547} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.212558] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267735', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'name': 'volume-bdeba398-0cb6-409b-9caf-e9b957918e7d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f8b4a587-9ca8-4710-8cf6-3f6ea336185c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bdeba398-0cb6-409b-9caf-e9b957918e7d', 'serial': 'bdeba398-0cb6-409b-9caf-e9b957918e7d'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1152.754608] env[61898]: DEBUG nova.objects.instance [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid f8b4a587-9ca8-4710-8cf6-3f6ea336185c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.255975] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1153.256963] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1046e4f9-636b-475f-8683-0ae8fe453653 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.265879] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1153.266124] env[61898]: ERROR oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk due to incomplete transfer. 
[ 1153.266339] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6dc98de4-4a6f-40ce-aa2b-33c1c95f0c88 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.274609] env[61898]: DEBUG oslo_vmware.rw_handles [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5262107b-64ee-17ff-f32c-829beee27018/disk-0.vmdk. {{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1153.274843] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Uploaded image 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 to the Glance image server {{(pid=61898) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1153.277456] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Destroying the VM {{(pid=61898) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1153.278254] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a7dfddfa-a660-45ce-9dce-41634bbe3784 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.284143] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1153.284143] env[61898]: value = "task-1241324" [ 1153.284143] env[61898]: _type = "Task" [ 1153.284143] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.291487] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241324, 'name': Destroy_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.765630] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c406b410-976c-43c5-a8ab-85cef1a0fa19 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.793563] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241324, 'name': Destroy_Task, 'duration_secs': 0.287744} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.793813] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Destroyed the VM [ 1153.794059] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleting Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1153.794302] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-759c6d48-21bd-4d60-840f-052b4b0b2462 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.799689] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1153.799689] env[61898]: value = "task-1241325" [ 1153.799689] env[61898]: _type = "Task" [ 1153.799689] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.807017] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241325, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.310199] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241325, 'name': RemoveSnapshot_Task, 'duration_secs': 0.317562} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.310480] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleted Snapshot of the VM instance {{(pid=61898) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1154.310748] env[61898]: DEBUG nova.compute.manager [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1154.311745] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7478f9c8-7ec1-4568-a0b1-0322f8a3216c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.780603] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.781038] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.781109] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1154.781303] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.781475] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.783776] env[61898]: INFO nova.compute.manager [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e 
tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Terminating instance [ 1154.822991] env[61898]: INFO nova.compute.manager [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Shelve offloading [ 1155.287753] env[61898]: DEBUG nova.compute.manager [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1155.288017] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.288900] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be402de-f493-4643-a798-f49cda345267 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.296598] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.296824] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c14d65c-ca30-4356-af43-ce4456af4d07 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.303800] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1155.303800] env[61898]: value = "task-1241326" [ 1155.303800] env[61898]: _type = "Task" [ 1155.303800] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.311388] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241326, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.326056] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.326324] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d9172bd-1b96-4467-b7ed-7d90a0c3cb7b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.332083] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1155.332083] env[61898]: value = "task-1241327" [ 1155.332083] env[61898]: _type = "Task" [ 1155.332083] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.339870] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.813271] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241326, 'name': PowerOffVM_Task, 'duration_secs': 0.185073} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.813636] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1155.813731] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1155.813978] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17b74af0-d516-4fed-8c3f-5ec27fabab11 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.841093] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] VM already powered off {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1155.841301] env[61898]: DEBUG nova.compute.manager [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1155.842126] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc102ac2-9215-47fc-80a3-6261268b6eb5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.847544] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1155.847713] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.847886] env[61898]: DEBUG nova.network.neutron [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.874433] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Unregistered the VM {{(pid=61898) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1155.874650] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1155.874835] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleting the datastore file [datastore2] f8b4a587-9ca8-4710-8cf6-3f6ea336185c {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1155.875109] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a12c2259-f5aa-4835-b8d4-c2ec4d417e57 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.880409] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1155.880409] env[61898]: value = "task-1241329" [ 1155.880409] env[61898]: _type = "Task" [ 1155.880409] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.888085] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.390142] env[61898]: DEBUG oslo_vmware.api [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12858} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.390443] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.390631] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.390815] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.390992] env[61898]: INFO nova.compute.manager [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1156.391259] env[61898]: DEBUG oslo.service.loopingcall [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1156.391456] env[61898]: DEBUG nova.compute.manager [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1156.391548] env[61898]: DEBUG nova.network.neutron [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.556902] env[61898]: DEBUG nova.network.neutron [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.969293] env[61898]: DEBUG nova.compute.manager [req-2ee47706-9c4e-4751-b3cf-aaeebab135ca req-184234fe-2257-4627-9607-e9c8a19e4c59 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Received event network-vif-deleted-bd9582d2-676b-45d5-be6a-1883dd40c2ff {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1156.969293] env[61898]: INFO nova.compute.manager [req-2ee47706-9c4e-4751-b3cf-aaeebab135ca req-184234fe-2257-4627-9607-e9c8a19e4c59 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Neutron deleted interface bd9582d2-676b-45d5-be6a-1883dd40c2ff; detaching it from the instance and deleting it from the info cache [ 1156.969293] env[61898]: DEBUG nova.network.neutron [req-2ee47706-9c4e-4751-b3cf-aaeebab135ca req-184234fe-2257-4627-9607-e9c8a19e4c59 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.060175] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.347603] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1157.348537] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9611325-f542-4696-8e69-83cfa58c02a4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.356246] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1157.356472] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1ebe826-167f-42af-930d-7e7380924e60 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.382710] env[61898]: DEBUG nova.network.neutron [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.426091] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1157.426396] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleting contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1157.426639] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleting the datastore file [datastore1] 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1157.426939] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2147097f-0152-43f0-918a-b1caf52b93d5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.433349] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1157.433349] env[61898]: value = "task-1241331" [ 1157.433349] env[61898]: _type = "Task" [ 1157.433349] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.440989] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.472361] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50b7bfe8-4942-4a5a-9ca1-5d2ae7a0f537 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.483395] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d100724-3481-4128-8206-dd54030e67b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.505639] env[61898]: DEBUG nova.compute.manager [req-2ee47706-9c4e-4751-b3cf-aaeebab135ca req-184234fe-2257-4627-9607-e9c8a19e4c59 service nova] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Detach interface failed, port_id=bd9582d2-676b-45d5-be6a-1883dd40c2ff, reason: Instance f8b4a587-9ca8-4710-8cf6-3f6ea336185c could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1157.885730] env[61898]: INFO nova.compute.manager [-] [instance: f8b4a587-9ca8-4710-8cf6-3f6ea336185c] Took 1.49 seconds to deallocate network for instance. [ 1157.943444] env[61898]: DEBUG oslo_vmware.api [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127291} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.943644] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.943824] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleted contents of the VM from datastore datastore1 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.944010] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.964222] env[61898]: INFO nova.scheduler.client.report [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted allocations for instance 06f718b8-2433-4eb5-8a62-9e4c79e78e63 [ 1158.392508] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.392929] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.393031] env[61898]: DEBUG nova.objects.instance [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'resources' on Instance uuid f8b4a587-9ca8-4710-8cf6-3f6ea336185c {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.468221] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.920913] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82f0b86-6ce1-4310-89b2-cb95d4fce4af {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.928447] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3a736d-7067-4e61-acce-c610713bcb6c {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.959625] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be7182c-7f07-454e-9927-2d47ef846845 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.966572] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e434cb07-f259-46bb-8874-de5434c1f383 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.980464] env[61898]: DEBUG nova.compute.provider_tree [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.994122] env[61898]: DEBUG nova.compute.manager [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-vif-unplugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1158.995064] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.995329] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.995507] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.995681] env[61898]: DEBUG nova.compute.manager [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] No waiting events found dispatching network-vif-unplugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1158.995901] env[61898]: WARNING nova.compute.manager [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received unexpected event network-vif-unplugged-0a571ec3-5857-4620-a92f-26e53930943f for instance with vm_state shelved_offloaded and task_state None. 
[ 1158.996735] env[61898]: DEBUG nova.compute.manager [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-changed-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1158.996735] env[61898]: DEBUG nova.compute.manager [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing instance network info cache due to event network-changed-0a571ec3-5857-4620-a92f-26e53930943f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1158.996735] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.996735] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.996735] env[61898]: DEBUG nova.network.neutron [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.482708] env[61898]: DEBUG nova.scheduler.client.report [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1159.706293] env[61898]: DEBUG nova.network.neutron [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updated VIF entry in instance network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1159.706656] env[61898]: DEBUG nova.network.neutron [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": null, "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0a571ec3-58", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.987874] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.595s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1159.990530] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.522s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1159.990772] env[61898]: DEBUG nova.objects.instance [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'resources' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.009530] env[61898]: INFO nova.scheduler.client.report [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted allocations for instance f8b4a587-9ca8-4710-8cf6-3f6ea336185c [ 1160.209434] env[61898]: DEBUG oslo_concurrency.lockutils [req-e9d8f4ea-e171-4528-9f6a-2b59b848bbb1 req-10292de8-3bf9-455b-b3af-2337079af907 service nova] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1160.297231] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1160.493046] env[61898]: DEBUG nova.objects.instance [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'numa_topology' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.517291] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a1bdf0da-eb5a-4a1e-b83f-12ca12ad018e tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "f8b4a587-9ca8-4710-8cf6-3f6ea336185c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.736s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1160.995367] env[61898]: DEBUG nova.objects.base [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Object Instance<06f718b8-2433-4eb5-8a62-9e4c79e78e63> lazy-loaded attributes: resources,numa_topology {{(pid=61898) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1161.031136] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d705696a-4c87-4507-bd44-f3620d8acb32 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.038707] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22a3b2d-2795-40e8-a4c6-ddb8ca637280 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.069492] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe89d7f-5898-4f07-9d47-60a3ea1a08c6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.077182] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93aab94c-05b0-4f9c-9471-3af2435b201e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.090897] env[61898]: DEBUG nova.compute.provider_tree [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1161.593992] env[61898]: DEBUG nova.scheduler.client.report [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1162.098955] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.608176] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c102881c-8cb5-4cb9-8709-e6c32f8400bc tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.380s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.608775] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.312s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.609007] env[61898]: INFO nova.compute.manager [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Unshelving [ 1162.958519] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.958759] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.461132] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Starting instance... 
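Note: the "Acquiring lock ... / Lock ... acquired by ... waited Ns / released ... held Ns" lines around the resource tracker come from oslo.concurrency's lock wrapper (the "inner" frames in lockutils.py cited in each record). A minimal sketch of that pattern, using only the public oslo.concurrency API; the function body is a placeholder, not Nova's ResourceTracker:

# Minimal sketch of the locking pattern behind the "compute_resources"
# acquire/release records above; not Nova's actual resource tracker code.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def update_usage():
    # Serialized section: the decorator's wrapper measures how long the
    # caller waited for the lock and how long it was held, and logs both
    # at DEBUG, which is exactly what the records above show.
    pass

update_usage()
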
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1163.633992] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.634292] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.634513] env[61898]: DEBUG nova.objects.instance [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'pci_requests' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.983853] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1164.138582] env[61898]: DEBUG nova.objects.instance [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'numa_topology' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.641793] env[61898]: INFO nova.compute.claims [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1165.685794] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9a32ab-3fe3-4f73-92e0-105bfc11f41a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.693021] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61c0c7a-a713-4234-9145-19518015654c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.722994] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc2b87a-c435-48d2-b4cc-339c81575014 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.729430] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0341876-bf38-42c2-8321-f6c830be26aa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.741630] env[61898]: DEBUG nova.compute.provider_tree 
[None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.244992] env[61898]: DEBUG nova.scheduler.client.report [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1166.750898] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.116s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.753181] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.769s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.755099] env[61898]: INFO nova.compute.claims [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1166.783573] env[61898]: INFO nova.network.neutron [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating port 0a571ec3-5857-4620-a92f-26e53930943f with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1167.804823] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0125fb50-8b49-4c82-bdbe-7086581efaaf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.812094] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e02cc8e-57b5-45fe-bcfb-f8dd9736380c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.841083] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33bc592-24a9-49f5-af38-8cf3322efcac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.847917] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1faca8e-6f43-4b38-b520-c5da22eb148b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.861922] env[61898]: DEBUG nova.compute.provider_tree [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.236414] env[61898]: DEBUG nova.compute.manager [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1168.236549] env[61898]: DEBUG oslo_concurrency.lockutils [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.236660] env[61898]: DEBUG oslo_concurrency.lockutils [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.236812] env[61898]: DEBUG oslo_concurrency.lockutils [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.236979] env[61898]: DEBUG nova.compute.manager [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] No waiting events found dispatching network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1168.237319] env[61898]: WARNING nova.compute.manager [req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received unexpected event network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f for instance with vm_state shelved_offloaded and task_state spawning. 
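Note: every record in this capture uses the same prefix, "[ seconds] env[pid]: LEVEL logger [request context] message {{(pid) function file:line}}". When grepping a capture like this one, a small standard-library snippet (nothing Nova-specific, purely illustrative) is enough to split that prefix apart:

import re

# Matches the record prefix used throughout this log.
RECORD = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\]\s+env\[(?P<env>\d+)\]:\s+"
    r"(?P<level>DEBUG|INFO|WARNING|ERROR)\s+(?P<logger>\S+)\s+(?P<rest>.*)"
)

# Sample drawn (truncated) from the WARNING record above.
sample = ("[ 1168.237319] env[61898]: WARNING nova.compute.manager "
          "[req-2f6b70fe-8ac2-4a84-af76-834f3c5fcea7 "
          "req-bfb4af5f-8ea3-4723-aa6c-d8e925974983 service nova] "
          "[instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] "
          "Received unexpected event network-vif-plugged-0a571ec3-5857-4620-a92f-26e53930943f")

m = RECORD.match(sample)
print(m.group("ts"), m.group("level"), m.group("logger"))
# 1168.237319 WARNING nova.compute.manager
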
[ 1168.344130] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1168.344417] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.344645] env[61898]: DEBUG nova.network.neutron [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.365118] env[61898]: DEBUG nova.scheduler.client.report [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1168.870502] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.870918] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1169.046629] env[61898]: DEBUG nova.network.neutron [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.375868] env[61898]: DEBUG nova.compute.utils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1169.377440] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Allocating IP information in the background. 
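Note: the instance_info_cache dumps above are JSON-like lists of VIF dicts. A hedged sketch (plain dicts here, not Nova's nova.network.model classes) of how the fixed and floating addresses for a port such as 0a571ec3-5857-4620-a92f-26e53930943f can be read out of such a structure:

# Illustrative only: walk a network_info-shaped structure and collect the
# fixed and floating IPs per VIF.
def addresses(network_info):
    result = {}
    for vif in network_info:
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        result[vif["id"]] = {"fixed": fixed, "floating": floating}
    return result

vif = {"id": "0a571ec3-5857-4620-a92f-26e53930943f",
       "network": {"subnets": [{"ips": [{"address": "192.168.128.14",
                                         "floating_ips": [{"address": "10.180.180.230"}]}]}]}}
print(addresses([vif]))
# {'0a571ec3-5857-4620-a92f-26e53930943f': {'fixed': ['192.168.128.14'], 'floating': ['10.180.180.230']}}
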
{{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1169.377639] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1169.413396] env[61898]: DEBUG nova.policy [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b53a1aca504e4b7593420e25dd8602f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a5473d225540e186d6778172a187cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1169.550156] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.599795] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d1121631c2aa87bfc7663f3bff93d1fb',container_format='bare',created_at=2024-10-10T12:05:08Z,direct_url=,disk_format='vmdk',id=0e0e658f-de9b-4b8d-99b5-38dc75cf53a2,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1855277316-shelved',owner='975e564bd7f442629018b97007460e00',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-10-10T12:05:23Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1169.600067] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1169.600240] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1169.600432] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Flavor pref 0:0:0 {{(pid=61898) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1169.600585] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1169.600737] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1169.600944] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1169.601125] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1169.601300] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1169.601468] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1169.601646] env[61898]: DEBUG nova.virt.hardware [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1169.602530] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0a67a3-be21-4388-9cee-9422fc85ebaa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.610208] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43070e36-05e6-4d0e-8435-88c5b0b5a74a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.623213] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:8b:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69054a13-b7ef-44e1-bd3b-3ca5ba602848', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '0a571ec3-5857-4620-a92f-26e53930943f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.630365] env[61898]: DEBUG oslo.service.loopingcall [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1169.630603] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.630807] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07146889-c35e-4107-ab96-4ec1d92c318c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.649179] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.649179] env[61898]: value = "task-1241332" [ 1169.649179] env[61898]: _type = "Task" [ 1169.649179] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.656067] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241332, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.687911] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Successfully created port: bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1169.881027] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1170.160616] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241332, 'name': CreateVM_Task, 'duration_secs': 0.294285} completed successfully. 
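Note: the Folder.CreateVM_Task invocation and the "Waiting for the task ... to complete" / "progress is 0%" records above are oslo.vmware's task polling at work. A hedged sketch of that call pattern, assuming an already-built oslo.vmware VMwareAPISession such as the driver holds; this is not a drop-in replacement for Nova's vm_util helpers, and it needs a live vCenter to actually run:

# Hedged sketch, not Nova's vm_util: issue a vSphere CreateVM_Task through an
# existing oslo.vmware session and block on it. The session's task poller is
# what produces the "Task: {'id': task-..., 'name': CreateVM_Task} progress
# is 0%." lines above.
def create_vm(session, vm_folder_ref, config_spec, res_pool_ref, host_ref=None):
    task = session.invoke_api(session.vim, "CreateVM_Task", vm_folder_ref,
                              config=config_spec, pool=res_pool_ref, host=host_ref)
    # wait_for_task polls until the task reaches a terminal state and returns
    # the completed task info (or raises on error).
    return session.wait_for_task(task)
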
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.160809] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1170.161397] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.161598] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.161987] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1170.162264] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3a7c4b2-d8e1-4180-bc8f-ee65b73165be {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.167211] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1170.167211] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7d363-9c0d-d3c9-95f0-535b97de5807" [ 1170.167211] env[61898]: _type = "Task" [ 1170.167211] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.174470] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e7d363-9c0d-d3c9-95f0-535b97de5807, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.260486] env[61898]: DEBUG nova.compute.manager [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-changed-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1170.260724] env[61898]: DEBUG nova.compute.manager [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing instance network info cache due to event network-changed-0a571ec3-5857-4620-a92f-26e53930943f. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1170.260909] env[61898]: DEBUG oslo_concurrency.lockutils [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.261162] env[61898]: DEBUG oslo_concurrency.lockutils [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.261354] env[61898]: DEBUG nova.network.neutron [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Refreshing network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1170.677513] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1170.677727] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Processing image 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1170.677964] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1170.678130] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.678313] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.678832] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd62326f-5a8a-4b1a-a76d-440e9a46aaf3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.686837] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.687024] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1170.687688] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45e0d191-8a73-44cc-bc45-6dc766e18267 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.692654] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1170.692654] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52fd47c2-0e3d-533c-1dc2-b50622cc9243" [ 1170.692654] env[61898]: _type = "Task" [ 1170.692654] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.700299] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52fd47c2-0e3d-533c-1dc2-b50622cc9243, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.891647] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1170.913327] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1170.913632] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1170.913803] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1170.913994] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1170.914194] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1170.914372] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1170.914586] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1170.914750] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1170.914917] 
env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1170.915101] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1170.915279] env[61898]: DEBUG nova.virt.hardware [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1170.916132] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d655dc82-9231-4f21-b9c5-86761d306892 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.923742] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87edc620-5e15-408e-b776-3f127d9aca13 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.971110] env[61898]: DEBUG nova.network.neutron [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updated VIF entry in instance network info cache for port 0a571ec3-5857-4620-a92f-26e53930943f. 
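Note: the two identical "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" walkthroughs above choose a sockets/cores/threads split for the flavor's single vCPU under the 65536 limits. A simplified illustration of that enumeration (Nova's nova/virt/hardware.py additionally applies image/flavor preferences and NUMA constraints):

# Simplified illustration of the topology search traced above; for vcpus=1
# this yields the single (1, 1, 1) topology, matching the log.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)]
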
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1170.971523] env[61898]: DEBUG nova.network.neutron [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.202979] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Preparing fetch location {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1171.203266] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Fetch image to [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc/OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc.vmdk {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1171.203468] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Downloading stream optimized image 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 to [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc/OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc.vmdk on the data store datastore2 as vApp {{(pid=61898) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1171.203674] env[61898]: DEBUG nova.virt.vmwareapi.images [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Downloading image file data 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 to the ESX as VM named 'OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc' {{(pid=61898) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1171.257430] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Successfully updated port: bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1171.270669] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1171.270669] env[61898]: value = "resgroup-9" [ 1171.270669] env[61898]: _type = "ResourcePool" [ 1171.270669] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1171.270974] env[61898]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-590df8c6-2ae1-4ff6-a901-cb0dce5cc589 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.293360] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease: (returnval){ [ 1171.293360] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bc10b-ee3b-fa42-d7f1-435509fa601a" [ 1171.293360] env[61898]: _type = "HttpNfcLease" [ 1171.293360] env[61898]: } obtained for vApp import into resource pool (val){ [ 1171.293360] env[61898]: value = "resgroup-9" [ 1171.293360] env[61898]: _type = "ResourcePool" [ 1171.293360] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1171.293638] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the lease: (returnval){ [ 1171.293638] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bc10b-ee3b-fa42-d7f1-435509fa601a" [ 1171.293638] env[61898]: _type = "HttpNfcLease" [ 1171.293638] env[61898]: } to be ready. {{(pid=61898) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1171.299509] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1171.299509] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bc10b-ee3b-fa42-d7f1-435509fa601a" [ 1171.299509] env[61898]: _type = "HttpNfcLease" [ 1171.299509] env[61898]: } is initializing. 
{{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1171.473954] env[61898]: DEBUG oslo_concurrency.lockutils [req-f8195cb9-58d4-4427-84db-ea1db209f8a9 req-24d52b25-1a59-4425-a3ee-df01fb6d0fb1 service nova] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1171.759842] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1171.760047] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.760145] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1171.801052] env[61898]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1171.801052] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bc10b-ee3b-fa42-d7f1-435509fa601a" [ 1171.801052] env[61898]: _type = "HttpNfcLease" [ 1171.801052] env[61898]: } is ready. {{(pid=61898) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1171.801362] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1171.801362] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]525bc10b-ee3b-fa42-d7f1-435509fa601a" [ 1171.801362] env[61898]: _type = "HttpNfcLease" [ 1171.801362] env[61898]: }. {{(pid=61898) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1171.802118] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008b7c60-0675-4a46-9b9a-4eb46514f4fe {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.809137] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk from lease info. 
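Note: the rw_handles records above trace the stream-optimized image import: an HttpNfcLease is requested via ResourcePool.ImportVApp, polled until it leaves "initializing", and only then is the VMDK URL it exposes written to. A hedged outline of the first two steps, using only the calls the log itself names (ImportVApp, wait_for_lease_ready); the HTTP byte transfer is oslo_vmware.rw_handles' job and is deliberately omitted here:

# Hedged outline only: request an import lease from the resource pool, then
# wait until vCenter reports it ready ("Lease ... is ready.") before any
# bytes are pushed to the lease's VMDK URL.
def open_import_lease(session, res_pool_ref, vm_folder_ref, import_spec):
    lease = session.invoke_api(session.vim, "ImportVApp", res_pool_ref,
                               spec=import_spec, folder=vm_folder_ref)
    session.wait_for_lease_ready(lease)
    return lease
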
{{(pid=61898) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1171.809321] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk. {{(pid=61898) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1171.872255] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-638e2d8d-e037-4a4e-a1a6-ca4b8092a960 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.293770] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Instance cache missing network info. {{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1172.432573] env[61898]: DEBUG nova.network.neutron [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating instance_info_cache with network_info: [{"id": "bb86ac8b-814c-4582-9f52-4470a92d1327", "address": "fa:16:3e:60:c8:2e", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb86ac8b-81", "ovs_interfaceid": "bb86ac8b-814c-4582-9f52-4470a92d1327", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1172.569700] env[61898]: DEBUG nova.compute.manager [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Received event network-vif-plugged-bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1172.569967] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1172.570137] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1172.570399] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.570600] env[61898]: DEBUG nova.compute.manager [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] No waiting events found dispatching network-vif-plugged-bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1172.570774] env[61898]: WARNING nova.compute.manager [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Received unexpected event network-vif-plugged-bb86ac8b-814c-4582-9f52-4470a92d1327 for instance with vm_state building and task_state spawning. [ 1172.570934] env[61898]: DEBUG nova.compute.manager [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Received event network-changed-bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1172.571102] env[61898]: DEBUG nova.compute.manager [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Refreshing instance network info cache due to event network-changed-bb86ac8b-814c-4582-9f52-4470a92d1327. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1172.571276] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Acquiring lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1172.937037] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1172.937037] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Instance network_info: |[{"id": "bb86ac8b-814c-4582-9f52-4470a92d1327", "address": "fa:16:3e:60:c8:2e", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb86ac8b-81", "ovs_interfaceid": "bb86ac8b-814c-4582-9f52-4470a92d1327", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1172.937037] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Acquired lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.937037] env[61898]: DEBUG nova.network.neutron [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Refreshing network info cache for port bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.937955] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:c8:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '418ddd3d-5f64-407e-8e0c-c8b81639bee9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'bb86ac8b-814c-4582-9f52-4470a92d1327', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1172.946136] env[61898]: DEBUG oslo.service.loopingcall [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1172.952044] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1172.953953] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c7ec5d5-6028-442d-9f44-826627219db3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.977379] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1172.977379] env[61898]: value = "task-1241334" [ 1172.977379] env[61898]: _type = "Task" [ 1172.977379] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.986844] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241334, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.017608] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Completed reading data from the image iterator. {{(pid=61898) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1173.017862] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1173.018851] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d9abb0-25b2-453a-9660-7c41c54064ba {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.025319] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk is in state: ready. {{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1173.025566] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk. 
{{(pid=61898) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1173.025869] env[61898]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-d2cc193a-956c-4a7c-8d6f-ee733e68abf2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.179902] env[61898]: DEBUG nova.network.neutron [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updated VIF entry in instance network info cache for port bb86ac8b-814c-4582-9f52-4470a92d1327. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.180414] env[61898]: DEBUG nova.network.neutron [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating instance_info_cache with network_info: [{"id": "bb86ac8b-814c-4582-9f52-4470a92d1327", "address": "fa:16:3e:60:c8:2e", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb86ac8b-81", "ovs_interfaceid": "bb86ac8b-814c-4582-9f52-4470a92d1327", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.243021] env[61898]: DEBUG oslo_vmware.rw_handles [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dfacd9-5a08-fb35-12c2-b9c797871165/disk-0.vmdk. 
{{(pid=61898) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1173.243200] env[61898]: INFO nova.virt.vmwareapi.images [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Downloaded image file data 0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 [ 1173.243929] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75454b3-6b34-4c77-ae5c-218bcdc19c06 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.260363] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8756d7de-5e1a-4c04-bbe3-b6d507a27dc3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.293637] env[61898]: INFO nova.virt.vmwareapi.images [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] The imported VM was unregistered [ 1173.296309] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Caching image {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1173.296594] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Creating directory with path [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1173.296909] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4952646-8be9-4fdb-bc28-37feaf687fe0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.309446] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Created directory with path [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2 {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1173.309660] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc/OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc.vmdk to [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk. 
{{(pid=61898) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1173.309905] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6b4cc0bd-ccb4-4594-a65f-12c68a1e5a46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.316325] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1173.316325] env[61898]: value = "task-1241336" [ 1173.316325] env[61898]: _type = "Task" [ 1173.316325] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.324117] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.488039] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241334, 'name': CreateVM_Task, 'duration_secs': 0.3751} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.488217] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1173.488908] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.489157] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.489507] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1173.489789] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86566bdc-f8d6-436a-9d20-9c6f9108b520 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.494775] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1173.494775] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]524bef0b-e504-cc70-74ca-bf254a03faa2" [ 1173.494775] 
env[61898]: _type = "Task" [ 1173.494775] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.504672] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524bef0b-e504-cc70-74ca-bf254a03faa2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.683883] env[61898]: DEBUG oslo_concurrency.lockutils [req-97d91589-c6ac-4b7d-b846-71a3dfa1e51d req-27120d11-499f-432e-af14-e636321da0dd service nova] Releasing lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1173.826277] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.006017] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]524bef0b-e504-cc70-74ca-bf254a03faa2, 'name': SearchDatastore_Task, 'duration_secs': 0.057983} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.006349] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.006673] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1174.006928] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.007100] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.007294] env[61898]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1174.007625] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24d5d00a-cf5f-4f0d-ab1d-f51ccecdef25 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.024178] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1174.024450] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1174.025273] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be937d9e-64e9-4fe7-9ab7-7c4c62b3b441 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.030915] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1174.030915] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52be584d-8f19-abb9-19b2-8f639e8d2d5c" [ 1174.030915] env[61898]: _type = "Task" [ 1174.030915] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.039500] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52be584d-8f19-abb9-19b2-8f639e8d2d5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.327998] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.541224] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52be584d-8f19-abb9-19b2-8f639e8d2d5c, 'name': SearchDatastore_Task, 'duration_secs': 0.079186} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.542196] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0de06bd9-69d2-40c4-a887-27a1845c5574 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.547767] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1174.547767] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52e688af-f836-7f20-ca28-cefb3f0cf582" [ 1174.547767] env[61898]: _type = "Task" [ 1174.547767] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.556205] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e688af-f836-7f20-ca28-cefb3f0cf582, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.829141] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.059482] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52e688af-f836-7f20-ca28-cefb3f0cf582, 'name': SearchDatastore_Task, 'duration_secs': 0.077583} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.059991] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1175.060377] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] ceb13186-7bcb-44f4-9d92-d3a4dd02ac78/ceb13186-7bcb-44f4-9d92-d3a4dd02ac78.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1175.060720] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8a29800-57eb-424a-88dc-d75b2718a72f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.067513] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1175.067513] env[61898]: value = "task-1241337" [ 1175.067513] env[61898]: _type = "Task" [ 1175.067513] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.075243] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241337, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.327946] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.577612] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241337, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.829083] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241336, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.266807} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.829340] env[61898]: INFO nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc/OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc.vmdk to [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk. [ 1175.829565] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Cleaning up location [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1175.829844] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b8782f52-05ba-47ba-8f6b-fc8bd7006dbc {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1175.830200] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7269d5b2-669a-4f80-b72a-3b39e6ca07f7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.836638] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1175.836638] env[61898]: value = "task-1241338" [ 1175.836638] env[61898]: _type = "Task" [ 1175.836638] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.844138] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.078164] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241337, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.347094] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168556} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.347390] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1176.347573] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1176.347824] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk to [datastore2] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1176.348089] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6787e58a-6250-475e-9905-c9b865562b27 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.354644] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1176.354644] env[61898]: value = "task-1241339" [ 1176.354644] env[61898]: _type = "Task" [ 1176.354644] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.361746] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.579946] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241337, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.162483} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.580346] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] ceb13186-7bcb-44f4-9d92-d3a4dd02ac78/ceb13186-7bcb-44f4-9d92-d3a4dd02ac78.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1176.580598] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1176.580914] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56eaa2c2-60bb-4606-b009-41e5ad43b490 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.589201] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1176.589201] env[61898]: value = "task-1241340" [ 1176.589201] env[61898]: _type = "Task" [ 1176.589201] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.594757] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.600036] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.864994] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241339, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.098503] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092593} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.098794] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1177.099622] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614eac88-9d65-46b1-ad2f-7997d07f3fd9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.103200] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.103406] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.103585] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.103769] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1177.104567] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78361d5-8a0c-4d1b-b192-449c1b940d81 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.126519] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] ceb13186-7bcb-44f4-9d92-d3a4dd02ac78/ceb13186-7bcb-44f4-9d92-d3a4dd02ac78.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.129034] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ef6115f-c0cc-4c0b-ae7a-2b575323a2b8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.146012] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf76f24-888a-4451-ae09-2551dc2fbd4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.162260] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a894ee4-23ef-4078-8f30-3247fc4e28f1 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.165026] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1177.165026] env[61898]: value = "task-1241341" [ 1177.165026] env[61898]: _type = "Task" [ 1177.165026] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.171481] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112c55a8-e959-4498-abbd-6e7a354ac4d8 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.177707] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.205436] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181151MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1177.205643] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.205780] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.365717] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241339, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.675297] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.866412] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241339, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.176248] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.232840] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 06f718b8-2433-4eb5-8a62-9e4c79e78e63 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.233109] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.233199] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1178.233332] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1178.272409] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b0eee3-f6c3-49fa-aeff-765370b82844 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.280275] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60925625-65ba-43e6-ac1e-10716a3cadd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.310344] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccd7242-cb4e-4ac7-9d15-b6ae2b24f135 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.318749] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd8a207-d4cc-40d0-b05b-11e1f74e0bce {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.332600] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.368760] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': 
task-1241339, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.675871] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241341, 'name': ReconfigVM_Task, 'duration_secs': 1.513218} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.676306] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfigured VM instance instance-0000006f to attach disk [datastore2] ceb13186-7bcb-44f4-9d92-d3a4dd02ac78/ceb13186-7bcb-44f4-9d92-d3a4dd02ac78.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.676802] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08099360-5b08-4890-9fe8-d05fb5849a82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.682421] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1178.682421] env[61898]: value = "task-1241342" [ 1178.682421] env[61898]: _type = "Task" [ 1178.682421] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.689707] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241342, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.836037] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1178.866302] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241339, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.134165} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.866566] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2/0e0e658f-de9b-4b8d-99b5-38dc75cf53a2.vmdk to [datastore2] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1178.867345] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a383286f-8b03-40b4-8cb6-a5f03272369d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.888394] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1178.888851] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17558bb1-40ba-453e-b4c5-7fb543e01834 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.906836] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1178.906836] env[61898]: value = "task-1241343" [ 1178.906836] env[61898]: _type = "Task" [ 1178.906836] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.914008] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241343, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.192287] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241342, 'name': Rename_Task, 'duration_secs': 0.139709} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.192546] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1179.192816] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-436f6ceb-7d7f-4da7-be49-735d5ef9eee2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.198570] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1179.198570] env[61898]: value = "task-1241344" [ 1179.198570] env[61898]: _type = "Task" [ 1179.198570] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.205557] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.340654] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1179.340919] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.135s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1179.418300] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241343, 'name': ReconfigVM_Task, 'duration_secs': 0.256014} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.418576] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 06f718b8-2433-4eb5-8a62-9e4c79e78e63/06f718b8-2433-4eb5-8a62-9e4c79e78e63.vmdk or device None with type streamOptimized {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.419206] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-910c3afd-b352-4696-8cb4-b84f49ed1a75 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.425129] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1179.425129] env[61898]: value = "task-1241345" [ 1179.425129] env[61898]: _type = "Task" [ 1179.425129] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.433506] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241345, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.707664] env[61898]: DEBUG oslo_vmware.api [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241344, 'name': PowerOnVM_Task, 'duration_secs': 0.507302} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.708113] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1179.708253] env[61898]: INFO nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Took 8.82 seconds to spawn the instance on the hypervisor. 
[ 1179.708327] env[61898]: DEBUG nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1179.709092] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67899d60-a0fa-401c-b486-a0a7df73255c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.935564] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241345, 'name': Rename_Task, 'duration_secs': 0.141701} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.935564] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1179.935774] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c422603-d1c7-4328-b2bb-856e5a14cdca {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.941615] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1179.941615] env[61898]: value = "task-1241346" [ 1179.941615] env[61898]: _type = "Task" [ 1179.941615] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.950281] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.225335] env[61898]: INFO nova.compute.manager [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Took 16.26 seconds to build instance. 
[ 1180.341765] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.342071] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.342248] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1180.342391] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1180.453201] env[61898]: DEBUG oslo_vmware.api [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241346, 'name': PowerOnVM_Task, 'duration_secs': 0.490179} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.453600] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1180.498281] env[61898]: DEBUG nova.compute.manager [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Received event network-changed-bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1180.498569] env[61898]: DEBUG nova.compute.manager [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Refreshing instance network info cache due to event network-changed-bb86ac8b-814c-4582-9f52-4470a92d1327. 
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1180.498885] env[61898]: DEBUG oslo_concurrency.lockutils [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] Acquiring lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.499136] env[61898]: DEBUG oslo_concurrency.lockutils [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] Acquired lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.499393] env[61898]: DEBUG nova.network.neutron [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Refreshing network info cache for port bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.573913] env[61898]: DEBUG nova.compute.manager [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1180.575186] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5454888d-cd4a-4682-ac67-a017627cc05f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.727789] env[61898]: DEBUG oslo_concurrency.lockutils [None req-77dfd186-7054-486a-b978-24f863a13419 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.769s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.845978] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1180.846150] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquired lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.846294] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Forcefully refreshing network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1180.846440] env[61898]: DEBUG nova.objects.instance [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lazy-loading 'info_cache' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.093647] env[61898]: DEBUG oslo_concurrency.lockutils [None req-32360a8f-f8b5-476e-994b-ab36dbc07683 tempest-ServerActionsTestOtherB-1148223455 
tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.485s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1181.392187] env[61898]: DEBUG nova.network.neutron [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updated VIF entry in instance network info cache for port bb86ac8b-814c-4582-9f52-4470a92d1327. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1181.392595] env[61898]: DEBUG nova.network.neutron [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating instance_info_cache with network_info: [{"id": "bb86ac8b-814c-4582-9f52-4470a92d1327", "address": "fa:16:3e:60:c8:2e", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb86ac8b-81", "ovs_interfaceid": "bb86ac8b-814c-4582-9f52-4470a92d1327", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.896014] env[61898]: DEBUG oslo_concurrency.lockutils [req-7fa5d7af-ff3f-47a4-b236-92665113f991 req-71a3797b-871d-4bb7-86cd-66b0db231114 service nova] Releasing lock "refresh_cache-ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.316476] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.316777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1182.316957] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.317164] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.317345] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.319720] env[61898]: INFO nova.compute.manager [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Terminating instance [ 1182.582812] env[61898]: DEBUG nova.network.neutron [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [{"id": "0a571ec3-5857-4620-a92f-26e53930943f", "address": "fa:16:3e:f6:8b:ff", "network": {"id": "70fe3742-8e62-4978-9f6d-b7d6d23c91a2", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-918783122-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "975e564bd7f442629018b97007460e00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69054a13-b7ef-44e1-bd3b-3ca5ba602848", "external-id": "nsx-vlan-transportzone-153", "segmentation_id": 153, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0a571ec3-58", "ovs_interfaceid": "0a571ec3-5857-4620-a92f-26e53930943f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.823630] env[61898]: DEBUG nova.compute.manager [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1182.823893] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1182.824861] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1655d1-89cb-4849-af3a-50bba547855b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.832709] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1182.832948] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ba73536-99a8-4b77-87fd-c6f79a30cc29 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.839222] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1182.839222] env[61898]: value = "task-1241347" [ 1182.839222] env[61898]: _type = "Task" [ 1182.839222] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.847313] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241347, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.085794] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Releasing lock "refresh_cache-06f718b8-2433-4eb5-8a62-9e4c79e78e63" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.086087] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updated the network info_cache for instance {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10328}} [ 1183.086276] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.086450] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.086599] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.086748] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.086889] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.087053] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.087185] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1183.335639] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1183.348810] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241347, 'name': PowerOffVM_Task, 'duration_secs': 0.180259} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.349377] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1183.349637] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1183.350635] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8fc0f88-55e3-4a9a-85e3-75ecd2bbea6e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.413352] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1183.413586] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1183.413809] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleting the datastore file [datastore2] 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1183.414094] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c7dd931-ad7b-4830-ad10-a94043973b94 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.421584] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for the task: (returnval){ [ 1183.421584] env[61898]: value = "task-1241349" [ 1183.421584] env[61898]: _type = "Task" [ 1183.421584] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.428775] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241349, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.932061] env[61898]: DEBUG oslo_vmware.api [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Task: {'id': task-1241349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19369} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.932385] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1183.932618] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1183.932876] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1183.933111] env[61898]: INFO nova.compute.manager [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1183.933403] env[61898]: DEBUG oslo.service.loopingcall [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1183.933700] env[61898]: DEBUG nova.compute.manager [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1183.933812] env[61898]: DEBUG nova.network.neutron [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1184.406940] env[61898]: DEBUG nova.compute.manager [req-833ac49a-f8bd-4787-9f32-9103c559111c req-e534d2a2-2571-4d2a-a432-bf02e02c1a5d service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Received event network-vif-deleted-0a571ec3-5857-4620-a92f-26e53930943f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1184.407217] env[61898]: INFO nova.compute.manager [req-833ac49a-f8bd-4787-9f32-9103c559111c req-e534d2a2-2571-4d2a-a432-bf02e02c1a5d service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Neutron deleted interface 0a571ec3-5857-4620-a92f-26e53930943f; detaching it from the instance and deleting it from the info cache [ 1184.407348] env[61898]: DEBUG nova.network.neutron [req-833ac49a-f8bd-4787-9f32-9103c559111c req-e534d2a2-2571-4d2a-a432-bf02e02c1a5d service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.887453] env[61898]: DEBUG nova.network.neutron [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.909549] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-710cda3a-ab43-4b31-b1e9-2faa4dff5e97 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.919290] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62668f70-5e56-4652-aaf3-4c32b334bd1d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.941953] env[61898]: DEBUG nova.compute.manager [req-833ac49a-f8bd-4787-9f32-9103c559111c req-e534d2a2-2571-4d2a-a432-bf02e02c1a5d service nova] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Detach interface failed, port_id=0a571ec3-5857-4620-a92f-26e53930943f, reason: Instance 06f718b8-2433-4eb5-8a62-9e4c79e78e63 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1185.390826] env[61898]: INFO nova.compute.manager [-] [instance: 06f718b8-2433-4eb5-8a62-9e4c79e78e63] Took 1.46 seconds to deallocate network for instance. 
[ 1185.897363] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.897740] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.897889] env[61898]: DEBUG nova.objects.instance [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lazy-loading 'resources' on Instance uuid 06f718b8-2433-4eb5-8a62-9e4c79e78e63 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1186.440071] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a681585a-ed98-4faa-9e78-8454b9e73c3f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.447742] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0d5f6a-f433-41ca-a96f-25c2525725ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.477444] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462bf882-8daf-49a9-a814-06c240b24b92 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.484105] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a24544-0436-4caf-badc-f2b0df8fb5ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.496829] env[61898]: DEBUG nova.compute.provider_tree [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.999839] env[61898]: DEBUG nova.scheduler.client.report [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1187.505613] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 
tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.608s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.527063] env[61898]: INFO nova.scheduler.client.report [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Deleted allocations for instance 06f718b8-2433-4eb5-8a62-9e4c79e78e63 [ 1188.036841] env[61898]: DEBUG oslo_concurrency.lockutils [None req-5530e873-42e9-4163-867a-790350b97c69 tempest-ServerActionsTestOtherB-1148223455 tempest-ServerActionsTestOtherB-1148223455-project-member] Lock "06f718b8-2433-4eb5-8a62-9e4c79e78e63" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.720s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.429093] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "513bb114-5c0b-4c23-bffb-147548f64030" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.429452] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.935367] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1197.457476] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1197.457726] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1197.459219] env[61898]: INFO nova.compute.claims [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1198.505860] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd2cb10-e593-4f79-a103-329548b1e9d9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.514668] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493e1bab-4006-4c72-8d34-72c6c4c8d645 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.546779] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd9c591-f5c7-46c3-b4bf-054dfa1dc781 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.554224] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53a7546-0157-4fcd-b2ae-2139dbd2ec9c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.567281] env[61898]: DEBUG nova.compute.provider_tree [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.070299] env[61898]: DEBUG nova.scheduler.client.report [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1199.576334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.118s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1199.576899] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Start building networks asynchronously for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1200.081886] env[61898]: DEBUG nova.compute.utils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1200.083426] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1200.083606] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1200.121905] env[61898]: DEBUG nova.policy [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf774862c9eb4dff813e0ce587b91464', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6e712c632b74608acf8b12aa7ca90c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1200.354995] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Successfully created port: 8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1200.587523] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Start building block device mappings for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1201.597625] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Start spawning the instance on the hypervisor. {{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1201.622839] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1201.623145] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1201.623320] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1201.623515] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1201.623668] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1201.623822] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1201.624071] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1201.624225] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1201.624394] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1201.624560] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1201.624735] env[61898]: DEBUG nova.virt.hardware [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1201.625619] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c6bc1c-da8a-46f9-b58f-a8acb546ee7b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.633577] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e74d290-d115-4eb7-a9b0-5440fabdaab0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.727348] env[61898]: DEBUG nova.compute.manager [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Received event network-vif-plugged-8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1201.727617] env[61898]: DEBUG oslo_concurrency.lockutils [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] Acquiring lock "513bb114-5c0b-4c23-bffb-147548f64030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.727853] env[61898]: DEBUG oslo_concurrency.lockutils [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] Lock "513bb114-5c0b-4c23-bffb-147548f64030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.728229] env[61898]: DEBUG oslo_concurrency.lockutils [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] Lock "513bb114-5c0b-4c23-bffb-147548f64030-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1201.728464] env[61898]: DEBUG nova.compute.manager [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] No waiting events found dispatching network-vif-plugged-8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1201.728657] env[61898]: WARNING nova.compute.manager [req-6f884371-2fb0-4410-8909-ed076b16f1ad req-fd19e5f4-3840-4eaf-abb1-b4a43f5c282d service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Received unexpected event network-vif-plugged-8e1a97b3-f12b-4830-84f4-e53cd820030f for instance with vm_state building and task_state spawning. [ 1201.804743] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Successfully updated port: 8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1202.308117] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1202.308334] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquired lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.308532] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1202.842495] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Instance cache missing network info. 
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1202.964727] env[61898]: DEBUG nova.network.neutron [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Updating instance_info_cache with network_info: [{"id": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "address": "fa:16:3e:75:96:1a", "network": {"id": "9b7de880-c656-4e1d-b6d2-84e6171a6a41", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2076425503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e712c632b74608acf8b12aa7ca90c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1a97b3-f1", "ovs_interfaceid": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.468049] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Releasing lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1203.468049] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Instance network_info: |[{"id": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "address": "fa:16:3e:75:96:1a", "network": {"id": "9b7de880-c656-4e1d-b6d2-84e6171a6a41", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2076425503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e712c632b74608acf8b12aa7ca90c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1a97b3-f1", "ovs_interfaceid": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2002}} [ 1203.468267] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:96:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6def6dc5-d564-45ca-8f4f-7c820677e6e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e1a97b3-f12b-4830-84f4-e53cd820030f', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1203.475655] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Creating folder: Project (b6e712c632b74608acf8b12aa7ca90c9). Parent ref: group-v267550. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.475923] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5a817df-35cc-41cd-bc96-689aa0d70df4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.487380] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Created folder: Project (b6e712c632b74608acf8b12aa7ca90c9) in parent group-v267550. [ 1203.487561] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Creating folder: Instances. Parent ref: group-v267743. {{(pid=61898) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1203.487773] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a0ce3da-41ef-4141-b06d-1573d221297a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.496223] env[61898]: INFO nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Created folder: Instances in parent group-v267743. [ 1203.496456] env[61898]: DEBUG oslo.service.loopingcall [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1203.496633] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1203.496818] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-347c39ef-9503-4fb8-a3a6-8c313eeed3ef {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.514418] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1203.514418] env[61898]: value = "task-1241353" [ 1203.514418] env[61898]: _type = "Task" [ 1203.514418] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.521164] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241353, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.752839] env[61898]: DEBUG nova.compute.manager [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Received event network-changed-8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1203.753065] env[61898]: DEBUG nova.compute.manager [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Refreshing instance network info cache due to event network-changed-8e1a97b3-f12b-4830-84f4-e53cd820030f. {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1203.753316] env[61898]: DEBUG oslo_concurrency.lockutils [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] Acquiring lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.753466] env[61898]: DEBUG oslo_concurrency.lockutils [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] Acquired lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.753652] env[61898]: DEBUG nova.network.neutron [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Refreshing network info cache for port 8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1204.025059] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241353, 'name': CreateVM_Task, 'duration_secs': 0.284075} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.025059] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.025529] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.025657] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.026018] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1204.026328] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cec917c-c028-4d48-a2b6-f8b72937f7b0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.030511] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1204.030511] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52ac2bb8-23c0-2c93-5eca-2afd457e6940" [ 1204.030511] env[61898]: _type = "Task" [ 1204.030511] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.037563] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ac2bb8-23c0-2c93-5eca-2afd457e6940, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.436550] env[61898]: DEBUG nova.network.neutron [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Updated VIF entry in instance network info cache for port 8e1a97b3-f12b-4830-84f4-e53cd820030f. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1204.436949] env[61898]: DEBUG nova.network.neutron [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Updating instance_info_cache with network_info: [{"id": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "address": "fa:16:3e:75:96:1a", "network": {"id": "9b7de880-c656-4e1d-b6d2-84e6171a6a41", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-2076425503-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e712c632b74608acf8b12aa7ca90c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6def6dc5-d564-45ca-8f4f-7c820677e6e2", "external-id": "nsx-vlan-transportzone-53", "segmentation_id": 53, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e1a97b3-f1", "ovs_interfaceid": "8e1a97b3-f12b-4830-84f4-e53cd820030f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.540618] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52ac2bb8-23c0-2c93-5eca-2afd457e6940, 'name': SearchDatastore_Task, 'duration_secs': 0.008896} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.540874] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1204.541130] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1204.541367] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1204.541518] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.541698] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1204.541948] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72d57284-42ca-44f5-a65f-03709d115fcc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.549713] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1204.549894] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1204.550562] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-891b0d94-0041-474b-a8a6-617a71c439e5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.555277] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1204.555277] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5a7a8-45eb-61df-ae8e-8163d9b2c4ab" [ 1204.555277] env[61898]: _type = "Task" [ 1204.555277] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.562323] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5a7a8-45eb-61df-ae8e-8163d9b2c4ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.939806] env[61898]: DEBUG oslo_concurrency.lockutils [req-43b393b6-a017-4551-89e7-bd8451be25b4 req-de20c896-0509-4ed9-bfda-c820dfc5bd5b service nova] Releasing lock "refresh_cache-513bb114-5c0b-4c23-bffb-147548f64030" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.065467] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52d5a7a8-45eb-61df-ae8e-8163d9b2c4ab, 'name': SearchDatastore_Task, 'duration_secs': 0.007667} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.066198] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-450ee77f-b1cf-45d1-9836-4d4ec2d2dd66 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.070962] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1205.070962] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52c619da-a715-c4a2-2649-6b224054e38f" [ 1205.070962] env[61898]: _type = "Task" [ 1205.070962] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.078397] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c619da-a715-c4a2-2649-6b224054e38f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.582105] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52c619da-a715-c4a2-2649-6b224054e38f, 'name': SearchDatastore_Task, 'duration_secs': 0.008527} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.582374] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1205.582632] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 513bb114-5c0b-4c23-bffb-147548f64030/513bb114-5c0b-4c23-bffb-147548f64030.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1205.582877] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb91fdbf-3111-4b09-b9e6-a7d9ed334309 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.589309] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1205.589309] env[61898]: value = "task-1241354" [ 1205.589309] env[61898]: _type = "Task" [ 1205.589309] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.596518] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.099353] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.404041} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.099724] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 513bb114-5c0b-4c23-bffb-147548f64030/513bb114-5c0b-4c23-bffb-147548f64030.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1206.099724] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1206.099969] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d539a28-ba0c-48e8-93ff-f956c45a7ec2 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.106862] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1206.106862] env[61898]: value = "task-1241355" [ 1206.106862] env[61898]: _type = "Task" [ 1206.106862] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.113704] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241355, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.616735] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070794} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.616998] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1206.617756] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe7f3b1-b636-4eba-860f-056f145988c4 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.638533] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 513bb114-5c0b-4c23-bffb-147548f64030/513bb114-5c0b-4c23-bffb-147548f64030.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1206.638790] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95a6bf2d-bba6-4b47-8018-8d8ea271a5ac {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.657753] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1206.657753] env[61898]: value = "task-1241356" [ 1206.657753] env[61898]: _type = "Task" [ 1206.657753] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.664933] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.167416] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241356, 'name': ReconfigVM_Task, 'duration_secs': 0.249008} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.167782] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 513bb114-5c0b-4c23-bffb-147548f64030/513bb114-5c0b-4c23-bffb-147548f64030.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1207.168290] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1a34a5e-f036-4409-b9f5-5231f666be43 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.173943] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1207.173943] env[61898]: value = "task-1241357" [ 1207.173943] env[61898]: _type = "Task" [ 1207.173943] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.181291] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241357, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.683862] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241357, 'name': Rename_Task, 'duration_secs': 0.134982} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.684225] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1207.684474] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-824f6611-5133-409a-a6bb-452eab000629 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.690851] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1207.690851] env[61898]: value = "task-1241358" [ 1207.690851] env[61898]: _type = "Task" [ 1207.690851] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.697933] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241358, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.200436] env[61898]: DEBUG oslo_vmware.api [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241358, 'name': PowerOnVM_Task, 'duration_secs': 0.426657} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.200821] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.200920] env[61898]: INFO nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Took 6.60 seconds to spawn the instance on the hypervisor. [ 1208.201118] env[61898]: DEBUG nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1208.201882] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a461303-ffa3-4da0-b3ba-095565ec3011 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.718601] env[61898]: INFO nova.compute.manager [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Took 11.28 seconds to build instance. 
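[editor's note] The repeated "Waiting for the task ... progress is N% ... completed successfully" records above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) are produced by oslo.vmware's task-polling loop, which the Nova vmwareapi driver calls for every vCenter operation. The following is a minimal, hypothetical sketch of that pattern only, not the Nova driver code; the vCenter host, credentials, and the VM managed-object reference are placeholders.

    # Hypothetical sketch of the oslo.vmware invoke_api()/wait_for_task()
    # pattern behind the progress/completion DEBUG lines above.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    session = api.VMwareAPISession(
        'vc.example.test',        # placeholder vCenter endpoint
        'user', 'password',       # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)   # interval behind the _poll_task DEBUG lines

    # Managed-object reference for an existing VM (value is a placeholder).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() issues the SOAP call and returns a task moref;
    # wait_for_task() polls it, logging progress, until it succeeds
    # or raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)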
[ 1209.219831] env[61898]: DEBUG oslo_concurrency.lockutils [None req-473b12fc-528f-4ac1-aeec-e1991140f162 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.790s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.547626] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "513bb114-5c0b-4c23-bffb-147548f64030" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.547932] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.548186] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "513bb114-5c0b-4c23-bffb-147548f64030-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1209.548379] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1209.548555] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.551109] env[61898]: INFO nova.compute.manager [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Terminating instance [ 1210.055299] env[61898]: DEBUG nova.compute.manager [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1210.055590] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1210.056937] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290167b8-390c-41af-b875-2d380a55833b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.064214] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1210.064443] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44f1d699-1bdc-4ee9-8688-0c6308ebf802 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.070714] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1210.070714] env[61898]: value = "task-1241359" [ 1210.070714] env[61898]: _type = "Task" [ 1210.070714] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.078549] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.581897] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241359, 'name': PowerOffVM_Task, 'duration_secs': 0.176348} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.582278] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.582465] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.582797] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ea48897-e3e0-48d6-9361-9e3f25412da3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.645015] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.645336] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.645526] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Deleting the datastore file [datastore2] 513bb114-5c0b-4c23-bffb-147548f64030 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.645797] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2108c4e-d950-4677-8b67-376c6bbbcbee {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.651677] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for the task: (returnval){ [ 1210.651677] env[61898]: value = "task-1241361" [ 1210.651677] env[61898]: _type = "Task" [ 1210.651677] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.659482] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241361, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.161143] env[61898]: DEBUG oslo_vmware.api [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Task: {'id': task-1241361, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148272} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.161403] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.161592] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1211.161774] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1211.161955] env[61898]: INFO nova.compute.manager [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1211.162219] env[61898]: DEBUG oslo.service.loopingcall [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1211.162417] env[61898]: DEBUG nova.compute.manager [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1211.162510] env[61898]: DEBUG nova.network.neutron [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1211.444991] env[61898]: DEBUG nova.compute.manager [req-dc8f78ca-420b-459f-8f5a-4bd69115df30 req-ac5e16d1-bdff-4a2d-ba1c-9431decc2d55 service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Received event network-vif-deleted-8e1a97b3-f12b-4830-84f4-e53cd820030f {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1211.444991] env[61898]: INFO nova.compute.manager [req-dc8f78ca-420b-459f-8f5a-4bd69115df30 req-ac5e16d1-bdff-4a2d-ba1c-9431decc2d55 service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Neutron deleted interface 8e1a97b3-f12b-4830-84f4-e53cd820030f; detaching it from the instance and deleting it from the info cache [ 1211.445412] env[61898]: DEBUG nova.network.neutron [req-dc8f78ca-420b-459f-8f5a-4bd69115df30 req-ac5e16d1-bdff-4a2d-ba1c-9431decc2d55 service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.923422] env[61898]: DEBUG nova.network.neutron [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.947853] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c9c1d78-eb40-4e4d-b461-95c476699ffb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.957902] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517888ff-4fc3-4f81-97e7-d6269755167f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.981514] env[61898]: DEBUG nova.compute.manager [req-dc8f78ca-420b-459f-8f5a-4bd69115df30 req-ac5e16d1-bdff-4a2d-ba1c-9431decc2d55 service nova] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Detach interface failed, port_id=8e1a97b3-f12b-4830-84f4-e53cd820030f, reason: Instance 513bb114-5c0b-4c23-bffb-147548f64030 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1212.426396] env[61898]: INFO nova.compute.manager [-] [instance: 513bb114-5c0b-4c23-bffb-147548f64030] Took 1.26 seconds to deallocate network for instance. 
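[editor's note] The 'Acquiring lock "..." by "..."' / 'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' records that follow (for "compute_resources", the instance-UUID locks, and the "refresh_cache-..." locks) come from oslo.concurrency's named locks. A small, hypothetical sketch of that pattern, assuming illustrative lock names and functions rather than the actual Nova call sites:

    # Hypothetical sketch of the oslo.concurrency named-lock pattern that
    # emits the acquire/release DEBUG lines with wait/hold timings.
    from oslo_concurrency import lockutils

    # Decorator form: serializes concurrent callers on the named lock.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # resource-tracker style critical section (illustrative)

    # Context-manager form, equivalent for ad-hoc critical sections.
    with lockutils.lock('refresh_cache-<instance-uuid>'):  # placeholder name
        pass  # e.g. refresh the instance network info cache (illustrative)

    update_usage()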
[ 1212.933436] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.933817] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1212.933970] env[61898]: DEBUG nova.objects.instance [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lazy-loading 'resources' on Instance uuid 513bb114-5c0b-4c23-bffb-147548f64030 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.475653] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f2f77a-8f6d-446a-813c-2558e10b7d72 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.483120] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1642eef-4819-4549-84b7-4ec2ccdd7c1f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.512594] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d8ae8b-bf3c-4ae2-b4f9-ced2ee3e3309 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.519500] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d2aab2-a4fc-44c6-8304-b5cab861cc83 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.532333] env[61898]: DEBUG nova.compute.provider_tree [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1214.035537] env[61898]: DEBUG nova.scheduler.client.report [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1214.540449] env[61898]: DEBUG 
oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.558169] env[61898]: INFO nova.scheduler.client.report [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Deleted allocations for instance 513bb114-5c0b-4c23-bffb-147548f64030 [ 1215.066925] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c016d133-ddfb-403d-97e4-135fbfd107c7 tempest-ServerAddressesNegativeTestJSON-815688428 tempest-ServerAddressesNegativeTestJSON-815688428-project-member] Lock "513bb114-5c0b-4c23-bffb-147548f64030" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.518s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.689291] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1217.689566] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.192821] env[61898]: DEBUG nova.compute.utils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1218.695682] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.755341] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.755720] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock 
"ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.755808] env[61898]: INFO nova.compute.manager [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Attaching volume 169760ac-d3b8-4373-ba35-254b20d1b80a to /dev/sdb [ 1219.785022] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a974ec-df7e-48af-8e51-ba333176a12f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.792287] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6870798-d85d-43ea-91e0-15c6e6370f0c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.808089] env[61898]: DEBUG nova.virt.block_device [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating existing volume attachment record: 3f01de2e-fa3c-4463-a0dd-5b8bf637cb8c {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1224.350621] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Volume attach. 
Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1224.350942] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267746', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'name': 'volume-169760ac-d3b8-4373-ba35-254b20d1b80a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ceb13186-7bcb-44f4-9d92-d3a4dd02ac78', 'attached_at': '', 'detached_at': '', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'serial': '169760ac-d3b8-4373-ba35-254b20d1b80a'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1224.351874] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2aff74-bba5-4b0d-a058-e9fe0c243fbd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.368186] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76072265-065f-461b-b1b2-5c68ab0ac72d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.391265] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-169760ac-d3b8-4373-ba35-254b20d1b80a/volume-169760ac-d3b8-4373-ba35-254b20d1b80a.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1224.391500] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d46adfd8-4e27-4193-bd97-a64782704586 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.408336] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1224.408336] env[61898]: value = "task-1241364" [ 1224.408336] env[61898]: _type = "Task" [ 1224.408336] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.417160] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.917536] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241364, 'name': ReconfigVM_Task, 'duration_secs': 0.332843} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.917824] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-169760ac-d3b8-4373-ba35-254b20d1b80a/volume-169760ac-d3b8-4373-ba35-254b20d1b80a.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1224.922338] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15f43b26-c183-4833-98b9-c0ad20ed9b5e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.936206] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1224.936206] env[61898]: value = "task-1241365" [ 1224.936206] env[61898]: _type = "Task" [ 1224.936206] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.943775] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241365, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.446529] env[61898]: DEBUG oslo_vmware.api [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241365, 'name': ReconfigVM_Task, 'duration_secs': 0.130324} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.446882] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267746', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'name': 'volume-169760ac-d3b8-4373-ba35-254b20d1b80a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ceb13186-7bcb-44f4-9d92-d3a4dd02ac78', 'attached_at': '', 'detached_at': '', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'serial': '169760ac-d3b8-4373-ba35-254b20d1b80a'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1226.483417] env[61898]: DEBUG nova.objects.instance [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.989077] env[61898]: DEBUG oslo_concurrency.lockutils [None req-ad781f9e-8d46-4279-9950-cfa7c01e06a7 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.233s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.205586] env[61898]: DEBUG oslo_concurrency.lockutils [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.205828] env[61898]: DEBUG oslo_concurrency.lockutils [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.709386] env[61898]: INFO nova.compute.manager [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Detaching volume 169760ac-d3b8-4373-ba35-254b20d1b80a [ 1227.738485] env[61898]: INFO nova.virt.block_device [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Attempting to driver detach volume 169760ac-d3b8-4373-ba35-254b20d1b80a from mountpoint /dev/sdb [ 1227.738722] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] 
[instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Volume detach. Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1227.738968] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267746', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'name': 'volume-169760ac-d3b8-4373-ba35-254b20d1b80a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ceb13186-7bcb-44f4-9d92-d3a4dd02ac78', 'attached_at': '', 'detached_at': '', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'serial': '169760ac-d3b8-4373-ba35-254b20d1b80a'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1227.739830] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae19a57-d5ec-411d-9ea6-1ee7cbf35429 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.760647] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310e4b83-f9b8-4e8a-b42b-eabe6638622e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.766782] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee02db80-4e64-4154-86be-d52cf06aee16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.786750] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8eecbf-3481-4a6e-ba74-de8cd27adecb {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.800406] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] The volume has not been displaced from its original location: [datastore2] volume-169760ac-d3b8-4373-ba35-254b20d1b80a/volume-169760ac-d3b8-4373-ba35-254b20d1b80a.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1227.805478] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1227.805723] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d18dccc-3343-4d6f-ac60-0739066fc424 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.822377] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1227.822377] env[61898]: value = "task-1241366" [ 1227.822377] env[61898]: _type = "Task" [ 1227.822377] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.829441] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241366, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.331474] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241366, 'name': ReconfigVM_Task, 'duration_secs': 0.211174} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.331780] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1228.336372] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a7455ae-3141-42ec-af5f-e1e2dcd3b61e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.350967] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1228.350967] env[61898]: value = "task-1241367" [ 1228.350967] env[61898]: _type = "Task" [ 1228.350967] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.358202] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241367, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.860366] env[61898]: DEBUG oslo_vmware.api [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241367, 'name': ReconfigVM_Task, 'duration_secs': 0.131159} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.860694] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267746', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'name': 'volume-169760ac-d3b8-4373-ba35-254b20d1b80a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ceb13186-7bcb-44f4-9d92-d3a4dd02ac78', 'attached_at': '', 'detached_at': '', 'volume_id': '169760ac-d3b8-4373-ba35-254b20d1b80a', 'serial': '169760ac-d3b8-4373-ba35-254b20d1b80a'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1229.400800] env[61898]: DEBUG nova.objects.instance [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.407933] env[61898]: DEBUG oslo_concurrency.lockutils [None req-71347206-5a21-468b-b7c1-368fa0b64a3f tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.202s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.449243] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.449576] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.449732] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.449924] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.450112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.452223] env[61898]: INFO nova.compute.manager [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Terminating instance [ 1231.956580] env[61898]: DEBUG nova.compute.manager [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Start destroying the instance on the hypervisor. {{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1231.956851] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1231.957736] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c5c039-7389-4196-b84b-79f87b599192 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.965677] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1231.965908] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb9d545-8240-4877-8751-78961c2dd534 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.972179] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1231.972179] env[61898]: value = "task-1241368" [ 1231.972179] env[61898]: _type = "Task" [ 1231.972179] env[61898]: } to complete. 
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.980645] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.482337] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241368, 'name': PowerOffVM_Task, 'duration_secs': 0.154406} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.482704] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1232.482704] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1232.482951] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35c96b20-e0e9-404f-8199-f2d8b50cf980 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.543558] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1232.543815] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1232.544016] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleting the datastore file [datastore2] ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1232.544288] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-592178cb-9ccd-427a-a6cf-221a951cc820 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.550148] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1232.550148] env[61898]: value = "task-1241370" [ 
1232.550148] env[61898]: _type = "Task" [ 1232.550148] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.557193] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.059968] env[61898]: DEBUG oslo_vmware.api [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123993} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.060268] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1233.060462] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1233.060643] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1233.060822] env[61898]: INFO nova.compute.manager [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1233.061086] env[61898]: DEBUG oslo.service.loopingcall [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1233.061290] env[61898]: DEBUG nova.compute.manager [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1233.061387] env[61898]: DEBUG nova.network.neutron [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1233.516901] env[61898]: DEBUG nova.compute.manager [req-db5bd8da-4c16-4e2d-9fad-70aaaa60a555 req-fc5b9237-7378-45cc-ac11-3b54bd06544c service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Received event network-vif-deleted-bb86ac8b-814c-4582-9f52-4470a92d1327 {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1233.517165] env[61898]: INFO nova.compute.manager [req-db5bd8da-4c16-4e2d-9fad-70aaaa60a555 req-fc5b9237-7378-45cc-ac11-3b54bd06544c service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Neutron deleted interface bb86ac8b-814c-4582-9f52-4470a92d1327; detaching it from the instance and deleting it from the info cache [ 1233.517412] env[61898]: DEBUG nova.network.neutron [req-db5bd8da-4c16-4e2d-9fad-70aaaa60a555 req-fc5b9237-7378-45cc-ac11-3b54bd06544c service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.987357] env[61898]: DEBUG nova.network.neutron [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.020543] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c3c0b417-87ea-4c24-8654-b21f308146cf {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.029432] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701cd6f1-bb9b-4cc8-9a0d-95036701cc16 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.051279] env[61898]: DEBUG nova.compute.manager [req-db5bd8da-4c16-4e2d-9fad-70aaaa60a555 req-fc5b9237-7378-45cc-ac11-3b54bd06544c service nova] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Detach interface failed, port_id=bb86ac8b-814c-4582-9f52-4470a92d1327, reason: Instance ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1234.490899] env[61898]: INFO nova.compute.manager [-] [instance: ceb13186-7bcb-44f4-9d92-d3a4dd02ac78] Took 1.43 seconds to deallocate network for instance. 
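[editor's note] Every vCenter operation in the teardown above (ReconfigVM_Task, PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) shows the same cycle in the log: a task reference comes back, the API layer logs "Waiting for the task ... to complete", polls until "progress is N%" turns into "completed successfully", and records duration_secs. The snippet below is a minimal, generic sketch of that polling loop for illustration only; poll_fn, TaskTimeout, and the returned duration are assumptions of this sketch, not the actual oslo_vmware.api code that emits these log lines.

```python
import time


class TaskTimeout(Exception):
    """Raised when a task does not reach a terminal state within the deadline."""


def wait_for_task(poll_fn, poll_interval=0.5, timeout=300.0):
    """Poll ``poll_fn`` until the task it describes finishes.

    ``poll_fn`` is a hypothetical callable returning a dict shaped like
    ``{'state': 'running' | 'success' | 'error', 'progress': int}``.
    Returns the elapsed seconds on success (the ``duration_secs`` the log prints).
    """
    start = time.monotonic()
    while True:
        info = poll_fn()
        if info['state'] == 'success':
            return time.monotonic() - start
        if info['state'] == 'error':
            raise RuntimeError(info.get('error') or 'task failed')
        if time.monotonic() - start > timeout:
            raise TaskTimeout('task still at %s%% after %.0fs'
                              % (info.get('progress', 0), timeout))
        time.sleep(poll_interval)


if __name__ == '__main__':
    # Fake task that reports progress twice and then succeeds, mimicking the
    # "progress is N%" -> "completed successfully" cycle seen in the log above.
    states = iter([{'state': 'running', 'progress': 5},
                   {'state': 'running', 'progress': 60},
                   {'state': 'success', 'progress': 100}])
    duration = wait_for_task(lambda: next(states), poll_interval=0.01)
    print('completed successfully in %.3fs' % duration)
```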
[ 1234.997386] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.997783] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.997892] env[61898]: DEBUG nova.objects.instance [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'resources' on Instance uuid ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1235.531940] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0b1623-65ae-4132-8548-aab853f3c73b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.539689] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dca9c6-16ab-4a2a-ab6f-9dcdaff3056a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.570213] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd34545-0e33-4349-8fa7-7c04c520a91f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.577261] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42f6cff-fda5-4806-b83b-290fa843699c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.590139] env[61898]: DEBUG nova.compute.provider_tree [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.092962] env[61898]: DEBUG nova.scheduler.client.report [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1236.598484] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 
tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1236.618118] env[61898]: INFO nova.scheduler.client.report [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted allocations for instance ceb13186-7bcb-44f4-9d92-d3a4dd02ac78 [ 1237.126595] env[61898]: DEBUG oslo_concurrency.lockutils [None req-55f2f766-4f86-46d5-9418-06a62aaa11c3 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "ceb13186-7bcb-44f4-9d92-d3a4dd02ac78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.677s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.594607] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.594601] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.097356] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.097604] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.097777] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.097937] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1239.099221] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a6f893-da61-4fb3-9b5a-20619e25303b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.107196] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d54326-471a-4d45-bca7-89eae1a2e7ec {{(pid=61898) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.119997] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b03f908-b315-4162-8321-d5972963b139 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.126039] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fcb69d-a9ba-40e6-8da1-d76fb4e533bd {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.154604] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181294MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1239.154758] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.154985] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1239.501112] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1239.501458] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.004323] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Starting instance... 
{{(pid=61898) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2438}} [ 1240.524275] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.679957] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7d282e20-7c3a-4b12-a79f-af41677562f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.680243] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1240.680405] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1240.705629] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf77196-6371-4ef8-b9a0-a1cd849684b5 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.713507] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bafb2ae-abe2-48dc-af8f-4c1608711c46 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.743369] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35eeddcf-7fbc-4a19-80aa-8286e11c027f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.749904] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3327883-788e-4957-ad77-ae729b6ecc15 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.762417] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.265199] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1241.769953] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1241.770223] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.615s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.770501] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.246s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.772145] env[61898]: INFO nova.compute.claims [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.770009] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1242.770362] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1242.770476] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1242.804921] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa0be27-e239-4a32-adbd-40fe406408e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.812364] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a14fe8b-8714-4bbb-afd7-7ea833bb7916 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.842147] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41914a70-485e-49d0-a4ab-3a78a631093e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.848524] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69296d1-ebf4-41d4-8fb2-d80c74da14f0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.861184] env[61898]: DEBUG 
nova.compute.provider_tree [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1243.273259] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Didn't find any instances for network info cache update. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1243.273525] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.273706] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.273910] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.274063] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1243.364436] env[61898]: DEBUG nova.scheduler.client.report [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1243.595298] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1243.869799] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.099s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1243.870402] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Start building networks asynchronously for instance. 
{{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2835}} [ 1244.376054] env[61898]: DEBUG nova.compute.utils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1244.376820] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Allocating IP information in the background. {{(pid=61898) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1244.377929] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] allocate_for_instance() {{(pid=61898) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1244.425727] env[61898]: DEBUG nova.policy [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b53a1aca504e4b7593420e25dd8602f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a5473d225540e186d6778172a187cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61898) authorize /opt/stack/nova/nova/policy.py:201}} [ 1244.595105] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.689102] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Successfully created port: c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1244.881130] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Start building block device mappings for instance. {{(pid=61898) _build_resources /opt/stack/nova/nova/compute/manager.py:2870}} [ 1245.891465] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Start spawning the instance on the hypervisor. 
{{(pid=61898) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2644}} [ 1245.917174] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-10T11:52:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-10T11:52:34Z,direct_url=,disk_format='vmdk',id=e07a6c11-ab12-4187-81fc-1a28a9d1e65d,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='839a307abb65497eb273f288c364478a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-10T11:52:34Z,virtual_size=,visibility=), allow threads: False {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1245.917444] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1245.917604] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image limits 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1245.917788] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Flavor pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1245.917938] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Image pref 0:0:0 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1245.918107] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61898) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1245.918318] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1245.918480] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1245.918645] 
env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Got 1 possible topologies {{(pid=61898) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1245.918808] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1245.918981] env[61898]: DEBUG nova.virt.hardware [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61898) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1245.919852] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0c589b-3845-4d25-bd43-ab20572a1023 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.927603] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f245d3-aaac-49b3-a3cd-e7c7d673afed {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.049874] env[61898]: DEBUG nova.compute.manager [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Received event network-vif-plugged-c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1246.050124] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.050351] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.050590] env[61898]: DEBUG oslo_concurrency.lockutils [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.050707] env[61898]: DEBUG nova.compute.manager [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] No waiting events found dispatching network-vif-plugged-c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1246.050880] env[61898]: WARNING 
nova.compute.manager [req-9ba6d2c9-0e91-42bb-8278-a6872909df72 req-54671240-7a53-4658-816d-2f4faf2c2277 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Received unexpected event network-vif-plugged-c96739c9-b918-458f-a763-5e41c42b698b for instance with vm_state building and task_state spawning. [ 1246.131368] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Successfully updated port: c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1246.634090] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.634275] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.634428] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Building network info cache for instance {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1247.166027] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Instance cache missing network info. 
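
Note: the "Received unexpected event network-vif-plugged-..." warning and the "No waiting events found dispatching ..." line above come from the same mechanism: the compute manager registers the external events it intends to wait for, a Neutron notification pops the matching latch, and an event with no registered waiter is logged as unexpected. A schematic latch registry under those assumptions (a toy sketch, not Nova's InstanceEvents class):

```python
import threading

class EventLatches:
    """Toy instance-event registry: register a waiter, then pop it on arrival."""

    def __init__(self):
        self._lock = threading.Lock()
        self._latches = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the side that will wait (e.g. before plugging a VIF).
        latch = threading.Event()
        with self._lock:
            self._latches[(instance_uuid, event_name)] = latch
        return latch

    def dispatch(self, instance_uuid, event_name):
        # Called when an external event notification arrives.
        with self._lock:
            latch = self._latches.pop((instance_uuid, event_name), None)
        if latch is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        latch.set()
        return True

latches = EventLatches()
# No waiter was registered, so this takes the "unexpected event" branch, as in the log.
latches.dispatch("7d282e20", "network-vif-plugged-c96739c9")
```
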
{{(pid=61898) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1247.282667] env[61898]: DEBUG nova.network.neutron [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating instance_info_cache with network_info: [{"id": "c96739c9-b918-458f-a763-5e41c42b698b", "address": "fa:16:3e:bf:5a:d6", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc96739c9-b9", "ovs_interfaceid": "c96739c9-b918-458f-a763-5e41c42b698b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.785717] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.785942] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Instance network_info: |[{"id": "c96739c9-b918-458f-a763-5e41c42b698b", "address": "fa:16:3e:bf:5a:d6", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc96739c9-b9", "ovs_interfaceid": "c96739c9-b918-458f-a763-5e41c42b698b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61898) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2002}} [ 1247.786408] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:5a:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '418ddd3d-5f64-407e-8e0c-c8b81639bee9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c96739c9-b918-458f-a763-5e41c42b698b', 'vif_model': 'vmxnet3'}] {{(pid=61898) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1247.793887] env[61898]: DEBUG oslo.service.loopingcall [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1247.794126] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Creating VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1247.794355] env[61898]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3392895d-e20a-4508-a66c-53cc70b8ac37 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.814350] env[61898]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1247.814350] env[61898]: value = "task-1241371" [ 1247.814350] env[61898]: _type = "Task" [ 1247.814350] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.825109] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241371, 'name': CreateVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.078984] env[61898]: DEBUG nova.compute.manager [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Received event network-changed-c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1248.079210] env[61898]: DEBUG nova.compute.manager [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Refreshing instance network info cache due to event network-changed-c96739c9-b918-458f-a763-5e41c42b698b. 
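
Note: the "Instance VIF info" entry above is derived from the Neutron network_info element that precedes it: the bridge name, MAC address, port UUID and the NSX logical-switch id taken from the VIF's details. A rough sketch of that mapping, with field names copied from the log (the helper itself is hypothetical, not Nova's vmwareapi VIF code):

```python
# Hypothetical helper showing how the logged VIF info maps onto one
# network_info element; the real logic lives in nova/virt/vmwareapi/.
def vif_info_from_network_info(vif, vif_model="vmxnet3"):
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],      # e.g. "br-int"
        "mac_address": vif["address"],                 # e.g. "fa:16:3e:bf:5a:d6"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                         # Neutron port UUID
        "vif_model": vif_model,
    }

example_vif = {
    "id": "c96739c9-b918-458f-a763-5e41c42b698b",
    "address": "fa:16:3e:bf:5a:d6",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9"},
}
print(vif_info_from_network_info(example_vif))
```
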
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1248.079436] env[61898]: DEBUG oslo_concurrency.lockutils [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] Acquiring lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.079580] env[61898]: DEBUG oslo_concurrency.lockutils [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] Acquired lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.079746] env[61898]: DEBUG nova.network.neutron [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Refreshing network info cache for port c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1248.324484] env[61898]: DEBUG oslo_vmware.api [-] Task: {'id': task-1241371, 'name': CreateVM_Task, 'duration_secs': 0.2776} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.324842] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Created VM on the ESX host {{(pid=61898) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1248.325288] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.325454] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.325776] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1248.326035] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0eb7698d-b62f-4568-bf17-f677ede9f82d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.330222] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1248.330222] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]52b333f0-6ec2-dc8e-5f7a-e4b89a8e6c77" [ 1248.330222] env[61898]: _type = "Task" [ 1248.330222] env[61898]: } to complete. 
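
Note: the repeated "Task: {...} progress is 0%" / "completed successfully" pairs are task polling: each vCenter call returns a Task object and the caller polls it at a fixed interval until it reaches a terminal state. A schematic, library-free sketch of such a loop (the get_task_info callable and TaskFailed exception are stand-ins, not the oslo.vmware API):

```python
import time

class TaskFailed(Exception):
    """Stand-in for the error raised when a vCenter task ends in 'error'."""

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds or fails.

    get_task_info() is assumed to return an object with .state
    ('running', 'success' or 'error'), .progress and .result attributes.
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise TaskFailed(getattr(info, "error", "task failed"))
        # Still queued or running: report progress and try again later.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```
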
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.337285] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b333f0-6ec2-dc8e-5f7a-e4b89a8e6c77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.840463] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]52b333f0-6ec2-dc8e-5f7a-e4b89a8e6c77, 'name': SearchDatastore_Task, 'duration_secs': 0.010513} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.840776] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.841035] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Processing image e07a6c11-ab12-4187-81fc-1a28a9d1e65d {{(pid=61898) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1248.841280] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.841470] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.841605] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.841873] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e809f2b1-3f3a-4dc0-82d1-39621816f0e3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.849568] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 
tempest-AttachVolumeNegativeTest-1242975886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=61898) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.849748] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=61898) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1248.850459] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e84c540-1c95-4bcf-b7a8-722c7ea9dccc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.855178] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1248.855178] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]5282a190-0990-f1cb-3c08-bcc090ba6269" [ 1248.855178] env[61898]: _type = "Task" [ 1248.855178] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.862425] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5282a190-0990-f1cb-3c08-bcc090ba6269, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.988370] env[61898]: DEBUG nova.network.neutron [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updated VIF entry in instance network info cache for port c96739c9-b918-458f-a763-5e41c42b698b. 
{{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1248.988734] env[61898]: DEBUG nova.network.neutron [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating instance_info_cache with network_info: [{"id": "c96739c9-b918-458f-a763-5e41c42b698b", "address": "fa:16:3e:bf:5a:d6", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc96739c9-b9", "ovs_interfaceid": "c96739c9-b918-458f-a763-5e41c42b698b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.366876] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]5282a190-0990-f1cb-3c08-bcc090ba6269, 'name': SearchDatastore_Task, 'duration_secs': 0.007188} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.367651] env[61898]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c317dd4-ec24-419b-959e-31d9a3f90844 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.372779] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1249.372779] env[61898]: value = "session[52794197-29f0-ea69-2b8e-12812988d1d1]521cad9d-1753-57aa-bdd8-0e48d6d4860a" [ 1249.372779] env[61898]: _type = "Task" [ 1249.372779] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.380294] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521cad9d-1753-57aa-bdd8-0e48d6d4860a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.491066] env[61898]: DEBUG oslo_concurrency.lockutils [req-6958d499-0b39-4481-ae15-177e062e1c37 req-174be243-5428-454f-8652-d08ce87261b9 service nova] Releasing lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.883507] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': session[52794197-29f0-ea69-2b8e-12812988d1d1]521cad9d-1753-57aa-bdd8-0e48d6d4860a, 'name': SearchDatastore_Task, 'duration_secs': 0.009046} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.883781] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.884098] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7d282e20-7c3a-4b12-a79f-af41677562f6/7d282e20-7c3a-4b12-a79f-af41677562f6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1249.884356] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-67df83bf-c1cc-4f52-be03-53a4ef5c2494 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.890358] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1249.890358] env[61898]: value = "task-1241372" [ 1249.890358] env[61898]: _type = "Task" [ 1249.890358] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.897479] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.400365] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.405395} completed successfully. 
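
Note: the SearchDatastore / CopyVirtualDisk sequence above implements the per-datastore image cache: the cached VMDK sits under devstack-image-cache_base/<image-id>/ and is copied to <instance-uuid>/<instance-uuid>.vmdk before the instance boots. A small sketch of how those datastore paths are composed (plain string handling, illustrative rather than Nova's ds_util):

```python
# Illustrative only -- mirrors the "[datastore2] ..." paths seen in the log.
def ds_path(datastore, *parts):
    """Render a VMware datastore path like '[datastore2] a/b/c.vmdk'."""
    return f"[{datastore}] " + "/".join(parts)

def cached_image_vmdk(datastore, image_id, cache_dir="devstack-image-cache_base"):
    return ds_path(datastore, cache_dir, image_id, f"{image_id}.vmdk")

def instance_root_vmdk(datastore, instance_uuid):
    return ds_path(datastore, instance_uuid, f"{instance_uuid}.vmdk")

src = cached_image_vmdk("datastore2", "e07a6c11-ab12-4187-81fc-1a28a9d1e65d")
dst = instance_root_vmdk("datastore2", "7d282e20-7c3a-4b12-a79f-af41677562f6")
print(src)   # source: the cached image VMDK
print(dst)   # destination: the instance's root disk VMDK
```
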
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.400774] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/e07a6c11-ab12-4187-81fc-1a28a9d1e65d/e07a6c11-ab12-4187-81fc-1a28a9d1e65d.vmdk to [datastore2] 7d282e20-7c3a-4b12-a79f-af41677562f6/7d282e20-7c3a-4b12-a79f-af41677562f6.vmdk {{(pid=61898) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1250.400855] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Extending root virtual disk to 1048576 {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1250.401067] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc6de0a0-bc42-4f4b-b511-3e2c7b675d82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.407338] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1250.407338] env[61898]: value = "task-1241373" [ 1250.407338] env[61898]: _type = "Task" [ 1250.407338] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.414381] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241373, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.916719] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061284} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.916983] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Extended root virtual disk {{(pid=61898) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1250.917751] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ab89a9-7ee2-4554-80ef-d2fa34862e82 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.938761] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 7d282e20-7c3a-4b12-a79f-af41677562f6/7d282e20-7c3a-4b12-a79f-af41677562f6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1250.939032] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8155a4e2-fbc0-47da-84d5-47c6797f9cf9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.958526] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1250.958526] env[61898]: value = "task-1241374" [ 1250.958526] env[61898]: _type = "Task" [ 1250.958526] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.965976] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.469476] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241374, 'name': ReconfigVM_Task, 'duration_secs': 0.25862} completed successfully. 
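
Note: "Extending root virtual disk to 1048576" is the flavor's 1 GiB root disk expressed in KiB, which is the unit ExtendVirtualDisk_Task expects for its new capacity. A one-line check of that conversion:

```python
# Sanity check of the figure logged in "Extending root virtual disk to 1048576".
root_gb = 1
new_capacity_kb = root_gb * 1024 * 1024
assert new_capacity_kb == 1048576
```
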
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.469867] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 7d282e20-7c3a-4b12-a79f-af41677562f6/7d282e20-7c3a-4b12-a79f-af41677562f6.vmdk or device None with type sparse {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.470412] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c9184eb-960f-4fcc-96b4-ed283ea2a24d {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.476731] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1251.476731] env[61898]: value = "task-1241375" [ 1251.476731] env[61898]: _type = "Task" [ 1251.476731] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.483849] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241375, 'name': Rename_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.985971] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241375, 'name': Rename_Task, 'duration_secs': 0.1345} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.986284] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Powering on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1251.986545] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-512d3b8b-9b23-40e7-8bcf-f8153566e1b9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.992404] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1251.992404] env[61898]: value = "task-1241376" [ 1251.992404] env[61898]: _type = "Task" [ 1251.992404] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.999484] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241376, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.503596] env[61898]: DEBUG oslo_vmware.api [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241376, 'name': PowerOnVM_Task, 'duration_secs': 0.430772} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.504035] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Powered on the VM {{(pid=61898) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1252.504079] env[61898]: INFO nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1252.504248] env[61898]: DEBUG nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Checking state {{(pid=61898) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1252.504979] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b53b0b-53ce-47a8-9c77-ec6305970bcc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.022945] env[61898]: INFO nova.compute.manager [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Took 12.51 seconds to build instance. [ 1253.525187] env[61898]: DEBUG oslo_concurrency.lockutils [None req-c053f18c-310a-46f6-a136-89089f0fa0a1 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.024s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.774814] env[61898]: DEBUG nova.compute.manager [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Received event network-changed-c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1253.774944] env[61898]: DEBUG nova.compute.manager [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Refreshing instance network info cache due to event network-changed-c96739c9-b918-458f-a763-5e41c42b698b. 
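
Note: the "Took 6.61 seconds to spawn" / "Took 12.51 seconds to build" lines can be set against the individual task durations logged above; only about 1.6 s of the spawn is spent inside vCenter tasks, with the rest going to the work interleaved between them (poll intervals, Neutron cache refreshes, image-cache bookkeeping). A quick tally of the logged duration_secs values:

```python
# Task durations copied from the duration_secs fields logged above.
durations = {
    "CreateVM_Task": 0.2776,
    "SearchDatastore_Task (x3)": 0.010513 + 0.007188 + 0.009046,
    "CopyVirtualDisk_Task": 0.405395,
    "ExtendVirtualDisk_Task": 0.061284,
    "ReconfigVM_Task": 0.25862,
    "Rename_Task": 0.1345,
    "PowerOnVM_Task": 0.430772,
}
vcenter_time = sum(durations.values())
print(f"time inside vCenter tasks: {vcenter_time:.2f}s")   # roughly 1.59s
print("reported spawn time: 6.61s, build time: 12.51s")
```
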
{{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11465}} [ 1253.775216] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] Acquiring lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.775336] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] Acquired lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.775521] env[61898]: DEBUG nova.network.neutron [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Refreshing network info cache for port c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1254.478688] env[61898]: DEBUG nova.network.neutron [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updated VIF entry in instance network info cache for port c96739c9-b918-458f-a763-5e41c42b698b. {{(pid=61898) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1254.479070] env[61898]: DEBUG nova.network.neutron [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating instance_info_cache with network_info: [{"id": "c96739c9-b918-458f-a763-5e41c42b698b", "address": "fa:16:3e:bf:5a:d6", "network": {"id": "b237f6a1-4e04-4e82-9b29-2955d7c5a9e1", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1940296274-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a5473d225540e186d6778172a187cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "418ddd3d-5f64-407e-8e0c-c8b81639bee9", "external-id": "nsx-vlan-transportzone-107", "segmentation_id": 107, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc96739c9-b9", "ovs_interfaceid": "c96739c9-b918-458f-a763-5e41c42b698b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.981463] env[61898]: DEBUG oslo_concurrency.lockutils [req-9f47f893-0ae4-4cb2-b8a3-83965a092469 req-ea03d2ed-d894-4f23-8410-d02944b8a1ca service nova] Releasing lock "refresh_cache-7d282e20-7c3a-4b12-a79f-af41677562f6" {{(pid=61898) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1290.475249] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 
tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.475551] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.979052] env[61898]: DEBUG nova.compute.utils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Using /dev/sd instead of None {{(pid=61898) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1291.481865] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.540121] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.540414] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.540687] env[61898]: INFO nova.compute.manager [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Attaching volume d524fb67-0122-400b-a8fa-611117708142 to /dev/sdb [ 1292.570329] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a7a4be-072c-4e74-8e2b-304cc2a928fc {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.577720] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549f3de2-8a22-4368-9b41-98916a17328a {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.590509] env[61898]: DEBUG nova.virt.block_device [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 
tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating existing volume attachment record: 5aa9339d-cf65-44ac-99cb-f8f932c300dd {{(pid=61898) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1297.132605] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Volume attach. Driver type: vmdk {{(pid=61898) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1297.132895] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267748', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'name': 'volume-d524fb67-0122-400b-a8fa-611117708142', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d282e20-7c3a-4b12-a79f-af41677562f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'serial': 'd524fb67-0122-400b-a8fa-611117708142'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1297.133787] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae3b382-3d00-4e50-b5a8-edac4ce93ffa {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.150061] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2194658e-f6b8-4191-ba4d-ebac5a7cfb90 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.173236] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] volume-d524fb67-0122-400b-a8fa-611117708142/volume-d524fb67-0122-400b-a8fa-611117708142.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1297.173461] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20d9afe3-7f96-4626-a181-a81f1a43142f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.190731] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1297.190731] env[61898]: value = "task-1241379" [ 1297.190731] env[61898]: _type = "Task" [ 1297.190731] env[61898]: } to complete. 
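
Note: "Using /dev/sd instead of None" followed by "Attaching volume ... to /dev/sdb" shows the device-name selection: with no device name requested, the next free letter under the /dev/sd prefix is chosen, and /dev/sda is already taken by the root disk. A simplified sketch of that selection (illustrative, not the real get_next_device_name, which also handles multi-letter suffixes):

```python
import string

def next_device_name(used_devices, prefix="/dev/sd"):
    """Pick the first free /dev/sd<letter> not present in used_devices."""
    used_letters = {
        dev[len(prefix):len(prefix) + 1]
        for dev in used_devices
        if dev.startswith(prefix)
    }
    for letter in string.ascii_lowercase:
        if letter not in used_letters:
            return prefix + letter
    raise ValueError("no free device names left")

# The root disk already occupies /dev/sda, so the data volume lands on /dev/sdb.
print(next_device_name(["/dev/sda"]))   # /dev/sdb
```
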
{{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.198020] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241379, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.700477] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241379, 'name': ReconfigVM_Task, 'duration_secs': 0.330408} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.700771] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfigured VM instance instance-00000071 to attach disk [datastore2] volume-d524fb67-0122-400b-a8fa-611117708142/volume-d524fb67-0122-400b-a8fa-611117708142.vmdk or device None with type thin {{(pid=61898) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1297.705380] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5f2caaa-d245-4604-b60b-93adb8c9e0ea {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.719534] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1297.719534] env[61898]: value = "task-1241380" [ 1297.719534] env[61898]: _type = "Task" [ 1297.719534] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.726547] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241380, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.229068] env[61898]: DEBUG oslo_vmware.api [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241380, 'name': ReconfigVM_Task, 'duration_secs': 0.132861} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.229399] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267748', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'name': 'volume-d524fb67-0122-400b-a8fa-611117708142', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d282e20-7c3a-4b12-a79f-af41677562f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'serial': 'd524fb67-0122-400b-a8fa-611117708142'} {{(pid=61898) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1298.595019] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.595270] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1299.099678] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.099930] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.100081] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.100238] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61898) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1299.101212] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e52683-429a-4feb-b523-5f81b27d8403 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.109364] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9727e9-524e-44a5-858d-0fd612aca010 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.122539] env[61898]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff90c53b-2b38-4d63-83c2-bc339ebc2dd7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.128436] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c315b1ce-2e5b-4a5a-9456-804546937fe7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.156204] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181351MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=61898) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1299.156351] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.156525] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.263964] env[61898]: DEBUG nova.objects.instance [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid 7d282e20-7c3a-4b12-a79f-af41677562f6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1299.768486] env[61898]: DEBUG oslo_concurrency.lockutils [None req-1ee30e3a-01bc-41c2-8624-a44ed7cf40c8 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.228s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.949676] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.949954] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.180269] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Instance 7d282e20-7c3a-4b12-a79f-af41677562f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 
'VCPU': 1}}. {{(pid=61898) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.180473] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1300.180619] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61898) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1300.204945] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c8180c-9c5e-4df9-9db5-4c562dfc0490 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.212407] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32fb8d8-05e3-4d3e-a9c0-1f73a0eb708b {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.241372] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e3d112-2373-4b02-8dd7-9ebee5b44a4c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.248457] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eddfcd-5ec2-4f04-9bf8-114d847390c0 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.261352] env[61898]: DEBUG nova.compute.provider_tree [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1300.452653] env[61898]: INFO nova.compute.manager [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Detaching volume d524fb67-0122-400b-a8fa-611117708142 [ 1300.481330] env[61898]: INFO nova.virt.block_device [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Attempting to driver detach volume d524fb67-0122-400b-a8fa-611117708142 from mountpoint /dev/sdb [ 1300.481579] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Volume detach. 
Driver type: vmdk {{(pid=61898) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1300.481773] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267748', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'name': 'volume-d524fb67-0122-400b-a8fa-611117708142', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d282e20-7c3a-4b12-a79f-af41677562f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'serial': 'd524fb67-0122-400b-a8fa-611117708142'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1300.482687] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc2b501-0017-495c-a6e3-f3637c98d9b6 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.503559] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1e5d37-9f24-4579-8101-ad2b9887d93f {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.510355] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b887827-e839-4a3d-9aac-a9161b0614f9 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.531218] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070b5b3b-a627-48f7-a609-303c2bbb4cf7 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.545407] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] The volume has not been displaced from its original location: [datastore2] volume-d524fb67-0122-400b-a8fa-611117708142/volume-d524fb67-0122-400b-a8fa-611117708142.vmdk. No consolidation needed. 
{{(pid=61898) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1300.550424] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1300.550687] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc049f94-2d40-47c4-969d-a0057f903f85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.567559] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1300.567559] env[61898]: value = "task-1241381" [ 1300.567559] env[61898]: _type = "Task" [ 1300.567559] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.574621] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241381, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.764982] env[61898]: DEBUG nova.scheduler.client.report [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1301.076862] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241381, 'name': ReconfigVM_Task, 'duration_secs': 0.190644} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.077136] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=61898) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1301.081668] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90a283cf-23c9-45c4-bf29-bfa8a6c0312e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.096070] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1301.096070] env[61898]: value = "task-1241382" [ 1301.096070] env[61898]: _type = "Task" [ 1301.096070] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.103501] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241382, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.269658] env[61898]: DEBUG nova.compute.resource_tracker [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61898) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1301.269861] env[61898]: DEBUG oslo_concurrency.lockutils [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.113s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1301.605883] env[61898]: DEBUG oslo_vmware.api [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241382, 'name': ReconfigVM_Task, 'duration_secs': 0.12909} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.606380] env[61898]: DEBUG nova.virt.vmwareapi.volumeops [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-267748', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'name': 'volume-d524fb67-0122-400b-a8fa-611117708142', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '7d282e20-7c3a-4b12-a79f-af41677562f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd524fb67-0122-400b-a8fa-611117708142', 'serial': 'd524fb67-0122-400b-a8fa-611117708142'} {{(pid=61898) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1302.146115] env[61898]: DEBUG nova.objects.instance [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'flavor' on Instance uuid 7d282e20-7c3a-4b12-a79f-af41677562f6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.153378] env[61898]: DEBUG oslo_concurrency.lockutils [None req-fccdec61-12c9-4f87-b57b-bde36559f2aa tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.203s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1303.264673] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1303.264905] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1303.768843] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1303.769092] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Starting heal instance info cache {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10257}} [ 1303.770045] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Rebuilding the list of instances to heal {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10261}} [ 1304.179938] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61898) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.180343] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.180393] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.180546] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.180722] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.182973] env[61898]: INFO nova.compute.manager [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Terminating instance [ 1304.272325] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Skipping network cache update for instance because it is being deleted. {{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10274}} [ 1304.272503] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Didn't find any instances for network info cache update. 
{{(pid=61898) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10343}} [ 1304.272688] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.272852] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.273019] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.273172] env[61898]: DEBUG nova.compute.manager [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61898) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10876}} [ 1304.595438] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.595673] env[61898]: DEBUG oslo_service.periodic_task [None req-a68cad7f-cacc-402d-9c5b-982f911c45f5 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61898) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.686708] env[61898]: DEBUG nova.compute.manager [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Start destroying the instance on the hypervisor. 
{{(pid=61898) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3163}} [ 1304.686945] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Destroying instance {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1304.687838] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c09411b-dd4a-438d-b86b-6a05edd48229 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.695684] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Powering off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1304.695907] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92ffc155-af92-4c42-a981-26dee0e82d85 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.701470] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1304.701470] env[61898]: value = "task-1241383" [ 1304.701470] env[61898]: _type = "Task" [ 1304.701470] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.710166] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.211992] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241383, 'name': PowerOffVM_Task, 'duration_secs': 0.17188} completed successfully. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.212368] env[61898]: DEBUG nova.virt.vmwareapi.vm_util [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Powered off the VM {{(pid=61898) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1305.212417] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Unregistering the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1305.212638] env[61898]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ca074f6-beaa-41ab-a58c-62dbcec10a40 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.271939] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Unregistered the VM {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1305.272184] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Deleting contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1305.272375] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleting the datastore file [datastore2] 7d282e20-7c3a-4b12-a79f-af41677562f6 {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1305.272644] env[61898]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a4c55cb-89a9-466f-b9dc-44098e970e78 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.279357] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for the task: (returnval){ [ 1305.279357] env[61898]: value = "task-1241385" [ 1305.279357] env[61898]: _type = "Task" [ 1305.279357] env[61898]: } to complete. {{(pid=61898) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.286689] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241385, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.788897] env[61898]: DEBUG oslo_vmware.api [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Task: {'id': task-1241385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155785} completed successfully. {{(pid=61898) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.789172] env[61898]: DEBUG nova.virt.vmwareapi.ds_util [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted the datastore file {{(pid=61898) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1305.789368] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Deleted contents of the VM from datastore datastore2 {{(pid=61898) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1305.789547] env[61898]: DEBUG nova.virt.vmwareapi.vmops [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Instance destroyed {{(pid=61898) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1305.789726] env[61898]: INFO nova.compute.manager [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1305.789976] env[61898]: DEBUG oslo.service.loopingcall [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61898) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1305.790184] env[61898]: DEBUG nova.compute.manager [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Deallocating network for instance {{(pid=61898) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2295}} [ 1305.790279] env[61898]: DEBUG nova.network.neutron [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] deallocate_for_instance() {{(pid=61898) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1306.252874] env[61898]: DEBUG nova.compute.manager [req-54a97437-8011-465d-89b5-b761c4fb36b8 req-8525e34d-0190-46ed-8ca5-fc9b51e8be87 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Received event network-vif-deleted-c96739c9-b918-458f-a763-5e41c42b698b {{(pid=61898) external_instance_event /opt/stack/nova/nova/compute/manager.py:11460}} [ 1306.253148] env[61898]: INFO nova.compute.manager [req-54a97437-8011-465d-89b5-b761c4fb36b8 req-8525e34d-0190-46ed-8ca5-fc9b51e8be87 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Neutron deleted interface c96739c9-b918-458f-a763-5e41c42b698b; detaching it from the instance and deleting it from the info cache [ 1306.253335] env[61898]: DEBUG nova.network.neutron [req-54a97437-8011-465d-89b5-b761c4fb36b8 req-8525e34d-0190-46ed-8ca5-fc9b51e8be87 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.727816] env[61898]: DEBUG nova.network.neutron [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Updating instance_info_cache with network_info: [] {{(pid=61898) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.755357] env[61898]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db1c7c75-f8c6-4815-b3dd-b2cc8b5da8d3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.765627] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063d7b49-52f7-4b88-8a2b-ee4e0289eb3e {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.788451] env[61898]: DEBUG nova.compute.manager [req-54a97437-8011-465d-89b5-b761c4fb36b8 req-8525e34d-0190-46ed-8ca5-fc9b51e8be87 service nova] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Detach interface failed, port_id=c96739c9-b918-458f-a763-5e41c42b698b, reason: Instance 7d282e20-7c3a-4b12-a79f-af41677562f6 could not be found. {{(pid=61898) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11294}} [ 1307.232110] env[61898]: INFO nova.compute.manager [-] [instance: 7d282e20-7c3a-4b12-a79f-af41677562f6] Took 1.44 seconds to deallocate network for instance. 
[ 1307.738063] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.738423] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.738520] env[61898]: DEBUG nova.objects.instance [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lazy-loading 'resources' on Instance uuid 7d282e20-7c3a-4b12-a79f-af41677562f6 {{(pid=61898) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1308.272812] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5e81f2-8b8b-429f-b60b-7b6aea4cdbab {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.280017] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477da6a3-d962-486b-bb5d-3c5f980a3f2c {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.310999] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfad6006-689d-4996-bb48-0be35d5c4804 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.320036] env[61898]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528f2aad-3c8f-4cf5-9d01-6767b4083df3 {{(pid=61898) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.335105] env[61898]: DEBUG nova.compute.provider_tree [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed in ProviderTree for provider: 79886f75-94e9-4bf0-9cbd-87f3715d3144 {{(pid=61898) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.838213] env[61898]: DEBUG nova.scheduler.client.report [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Inventory has not changed for provider 79886f75-94e9-4bf0-9cbd-87f3715d3144 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61898) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1309.344043] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 
tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.605s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.363598] env[61898]: INFO nova.scheduler.client.report [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Deleted allocations for instance 7d282e20-7c3a-4b12-a79f-af41677562f6 [ 1309.872280] env[61898]: DEBUG oslo_concurrency.lockutils [None req-21fc35b5-8ba2-450a-809a-99a2e7bc5c29 tempest-AttachVolumeNegativeTest-1242975886 tempest-AttachVolumeNegativeTest-1242975886-project-member] Lock "7d282e20-7c3a-4b12-a79f-af41677562f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.692s {{(pid=61898) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}